From a3d655bbd01442eac6ead280514f4e87208ed8f0 Mon Sep 17 00:00:00 2001 From: amber-Chen-86 Date: Thu, 22 Jan 2026 15:00:08 -0800 Subject: [PATCH 01/18] [Conv authoring] fix pickle issue (#44806) * updated patch pickle * update * format * updated patch * update * fix pylint * updated version and changelog * update --- .../CHANGELOG.md | 5 + .../README.md | 3 +- .../conversations/authoring/_version.py | 2 +- .../conversations/authoring/models/_patch.py | 105 +++++++++++++++--- 4 files changed, 96 insertions(+), 19 deletions(-) diff --git a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/CHANGELOG.md b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/CHANGELOG.md index b9becd6d1936..cfd1a5c866c4 100644 --- a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/CHANGELOG.md +++ b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 1.0.0b4 (2026-01-22) + +### Bugs Fixed +- Changed the continuation token format. 
+ ## 1.0.0b3 (2025-12-09) ### Features Added diff --git a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/README.md b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/README.md index 87d93b12509b..70ee00edb2a7 100644 --- a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/README.md +++ b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/README.md @@ -40,7 +40,8 @@ This table shows the relationship between SDK versions and supported API version | SDK version | Supported API version of service | | ------------ | --------------------------------- | -| 1.0.0b3 - Latest preview release | 2023-04-01, 2025-11-01, 2025-05-15-preview, 2025-11-15-preview (default) | +| 1.0.0b4 - Latest preview release | 2023-04-01, 2025-11-01, 2025-05-15-preview, 2025-11-15-preview (default) | +| 1.0.0b3 | 2023-04-01, 2025-11-01, 2025-05-15-preview, 2025-11-15-preview (default) | | 1.0.0b2 | 2023-04-01, 2025-05-15-preview, 2025-11-15-preview (default) | | 1.0.0b1 | 2023-04-01, 2024-11-15-preview, 2025-05-15-preview (default) | diff --git a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/_version.py b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/_version.py index c43fdbc2e239..22553b18fb7e 100644 --- a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/_version.py +++ b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0b3" +VERSION = "1.0.0b4" diff --git a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/models/_patch.py b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/models/_patch.py index 2640def455c6..6a8a6a72b700 100644 --- a/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/models/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-conversations-authoring/azure/ai/language/conversations/authoring/models/_patch.py @@ -54,7 +54,7 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") - +PollingReturnType_co = TypeVar("PollingReturnType_co", covariant=True) class _JobsStrategy(LongRunningOperation): """Interprets job-status responses and tells the poller which URL to use.""" @@ -198,19 +198,54 @@ def _do_get(self, url: str) -> PipelineResponse: # ---- Continuation token support (doc pattern) ---- def get_continuation_token(self) -> str: - import pickle + import json + + if not self._operation: + raise RuntimeError("Polling method not initialized") + + token = { + "version": 1, + "data": { + "polling_url": self._operation.get_polling_url(), + "polling_interval": self._polling_interval, + "path_format_arguments": self._path_format_arguments, + }, + } - return base64.b64encode(pickle.dumps(self._initial_response)).decode("ascii") + raw = json.dumps(token, separators=(",", ":"), ensure_ascii=True).encode("utf-8") + return base64.b64encode(raw).decode("ascii") @classmethod - def from_continuation_token(cls, continuation_token: str, **kwargs: Any) -> Tuple[Any, PipelineResponse, Callable]: - import pickle + def from_continuation_token(cls, continuation_token: str, **kwargs: Any) -> Tuple[Any, Any, Callable[[Any], PollingReturnType_co]]: + try: + client = kwargs["client"] + except KeyError as exc: + raise ValueError("Need 
kwarg 'client' to be recreated from continuation_token") from exc - client = kwargs["client"] - deserialization_callback = kwargs["deserialization_callback"] - initial_response = pickle.loads(base64.b64decode(continuation_token)) # nosec - return client, initial_response, deserialization_callback + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError as exc: + raise ValueError( + "Need kwarg 'deserialization_callback' to be recreated from continuation_token" + ) from exc + + import json + + token = json.loads(base64.b64decode(continuation_token).decode("utf-8")) + + # Validate token schema and version for compatibility + if not isinstance(token, dict) or "version" not in token or "data" not in token: + raise ValueError("Invalid continuation token format.") + if token["version"] != 1: + raise ValueError( + "This continuation token is not compatible with this version. " + "It may have been generated by a different version." + ) + # Extract the state from the "data" field + state = token["data"] + # The file_id and other state can be extracted and used to resume polling + return client, state, deserialization_callback class _AsyncJobsPollingMethod(AsyncPollingMethod): def __init__( @@ -329,18 +364,54 @@ async def _do_get_async(self, url: str) -> PipelineResponse: # ---- Continuation token ---- def get_continuation_token(self) -> str: - import pickle + import json - return base64.b64encode(pickle.dumps(self._initial_response)).decode("ascii") + if not self._operation: + raise RuntimeError("Polling method not initialized") + + token = { + "version": 1, + "data": { + "polling_url": self._operation.get_polling_url(), + "polling_interval": self._polling_interval, + "path_format_arguments": self._path_format_arguments, + }, + } + + raw = json.dumps(token, separators=(",", ":"), ensure_ascii=True).encode("utf-8") + return base64.b64encode(raw).decode("ascii") @classmethod - def from_continuation_token(cls, continuation_token: str, **kwargs: 
Any) -> Tuple[Any, PipelineResponse, Callable]: - import pickle + def from_continuation_token(cls, continuation_token: str, **kwargs: Any) -> Tuple[Any, Any, Callable[[Any], PollingReturnType_co]]: + try: + client = kwargs["client"] + except KeyError as exc: + raise ValueError("Need kwarg 'client' to be recreated from continuation_token") from exc + + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError as exc: + raise ValueError( + "Need kwarg 'deserialization_callback' to be recreated from continuation_token" + ) from exc + + import json + + token = json.loads(base64.b64decode(continuation_token).decode("utf-8")) + + # Validate token schema and version for compatibility + if not isinstance(token, dict) or "version" not in token or "data" not in token: + raise ValueError("Invalid continuation token format.") + if token["version"] != 1: + raise ValueError( + "This continuation token is not compatible with this version. " + "It may have been generated by a different version." + ) - client = kwargs["client"] - deserialization_callback = kwargs["deserialization_callback"] - initial_response = pickle.loads(base64.b64decode(continuation_token)) # nosec - return client, initial_response, deserialization_callback + # Extract the state from the "data" field + state = token["data"] + # The file_id and other state can be extracted and used to resume polling + return client, state, deserialization_callback class CreateDeploymentDetails(_GeneratedCreateDeploymentDetails): """Represents the options for creating or updating a project deployment. 
From 0c75e9a71855aea346db4723c4ad23a3330d08e9 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Thu, 22 Jan 2026 15:08:57 -0800 Subject: [PATCH 02/18] Fix API Change Check (#44808) Co-authored-by: Alitzel Mendez Co-authored-by: Alitzel Mendez <6895254+AlitzelMendez@users.noreply.github.com> --- eng/common/scripts/Helpers/ApiView-Helpers.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/common/scripts/Helpers/ApiView-Helpers.ps1 b/eng/common/scripts/Helpers/ApiView-Helpers.ps1 index e8d867db9e95..c7b1f313b19e 100644 --- a/eng/common/scripts/Helpers/ApiView-Helpers.ps1 +++ b/eng/common/scripts/Helpers/ApiView-Helpers.ps1 @@ -215,7 +215,7 @@ function Set-ApiViewCommentForPR { try { $existingComment = Get-GitHubIssueComments -RepoOwner $RepoOwner -RepoName $RepoName -IssueNumber $PrNumber -AuthToken $AuthToken $existingAPIViewComment = $existingComment | Where-Object { - $_.body.StartsWith("**API Change Check**", [StringComparison]::OrdinalIgnoreCase) -or $_.body.StartsWith("## API Change Check", [StringComparison]::OrdinalIgnoreCase) } + $_.body.StartsWith("**API Change Check**", [StringComparison]::OrdinalIgnoreCase) -or $_.body.StartsWith("## API Change Check", [StringComparison]::OrdinalIgnoreCase) } | Select-Object -Last 1 } catch { LogWarning "Failed to get comments from Pull Request: $PrNumber in repo: $repoFullName" } From 28ce016d88d4053c366168c9928cb8e70f110f9d Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Thu, 22 Jan 2026 18:29:02 -0800 Subject: [PATCH 03/18] add azure-keyvault-admin to the smoketest set on a core change (#44811) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add azure-keyvault-admin to the smoketest set on a core change * Update eng/scripts/Language-Settings.ps1 to mirror appropriate keyvault package name Co-authored-by: McCoy PatiƱo <39780829+mccoyp@users.noreply.github.com> 
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- eng/scripts/Language-Settings.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/scripts/Language-Settings.ps1 b/eng/scripts/Language-Settings.ps1 index d9832d0a852c..b1483d52cbf2 100644 --- a/eng/scripts/Language-Settings.ps1 +++ b/eng/scripts/Language-Settings.ps1 @@ -105,6 +105,7 @@ function Get-python-AdditionalValidationPackagesFromPackageSet { "azure-data-table", "azure-appconfig", "azure-keyvault-keys", + "azure-keyvault-administration", "azure-identity", "azure-mgmt-core", "azure-core-experimental", From bbc7bcd27ded6549056d1fac36ac00729a8c245c Mon Sep 17 00:00:00 2001 From: Pratibha Shrivastav <164305667+PratibhaShrivastav18@users.noreply.github.com> Date: Fri, 23 Jan 2026 12:06:46 +0530 Subject: [PATCH 04/18] Remove multiapi since it's getting deprecated (#44779) * remove multiapi * fix test * update autorest readme to fix issues with latest versions --- .../azure/ai/ml/_restclient/__init__.py | 4 +- .../_azure_machine_learning_workspaces.py | 1422 ----------------- .../azure/ai/ml/_restclient/_configuration.py | 71 - .../azure/ai/ml/_restclient/_version.py | 8 - .../azure/ai/ml/_restclient/aio/__init__.py | 10 - .../aio/_azure_machine_learning_workspaces.py | 1421 ---------------- .../ai/ml/_restclient/aio/_configuration.py | 67 - .../azure/ai/ml/_restclient/models.py | 14 - .../azure/ai/ml/_restclient/py.typed | 1 - .../scripts/regenerate_restclient.py | 2 - .../resource-manager/readme.md | 179 +-- .../unittests/test_pipeline_job_schema.py | 5 + 12 files changed, 33 insertions(+), 3171 deletions(-) delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_azure_machine_learning_workspaces.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_configuration.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_version.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/__init__.py delete mode 100644 
sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_azure_machine_learning_workspaces.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_configuration.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/models.py delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/py.typed diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/__init__.py index dad2c6eeb01b..6ac0705c7047 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/__init__.py @@ -6,8 +6,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces -__all__ = ['AzureMachineLearningWorkspaces'] +# This package contains versioned API clients in subfolders. +# Import from specific version subfolders like v2022_05_01, v2023_04_01, etc. try: from ._patch import patch_sdk # type: ignore diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_azure_machine_learning_workspaces.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_azure_machine_learning_workspaces.py deleted file mode 100644 index c55fb6c432e9..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_azure_machine_learning_workspaces.py +++ /dev/null @@ -1,1422 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from msrest import Deserializer, Serializer - -from azure.mgmt.core import ARMPipelineClient -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin - -from ._configuration import AzureMachineLearningWorkspacesConfiguration - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - - from azure.core.credentials import TokenCredential - -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass - -class AzureMachineLearningWorkspaces(MultiApiClientMixin, _SDKClient): - """These APIs allow end users to operate on Azure Machine Learning Workspace resources. - - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. - For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. - :type subscription_id: str - :param api_version: API version to use if no profile is provided, or if missing in profile. - :type api_version: str - :param base_url: Service URL - :type base_url: str - :param profile: A profile definition, from KnownProfiles to dict. 
- :type profile: azure.profiles.KnownProfiles - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - DEFAULT_API_VERSION = '2022-10-01' - _PROFILE_TAG = "azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - 'assets': '1.0.0', - 'async_operations': 'v1.0', - 'batch_job_deployment': '2020-09-01-dataplanepreview', - 'batch_job_endpoint': '2020-09-01-dataplanepreview', - 'data_call': '1.5.0', - 'data_container': '1.5.0', - 'data_references': '2021-10-01-dataplanepreview', - 'data_version': '1.5.0', - 'dataset_containers': '2021-10-01', - 'dataset_controller_v2': '1.5.0', - 'dataset_v2': '1.5.0', - 'dataset_versions': '2021-10-01', - 'datasets_v1': '1.5.0', - 'delete': 'v1.0', - 'events': 'v1.0', - 'experiments': 'v1.0', - 'extensive_model': '1.0.0', - 'get_operation_status': '1.5.0', - 'metric': 'v1.0', - 'migration': '1.0.0', - 'models': '1.0.0', - 'registry_management_non_workspace': 'v1.0', - 'resource_management_asset_reference': '2021-10-01-dataplanepreview', - 'run': 'v1.0', - 'run_artifacts': 'v1.0', - 'runs': 'v1.0', - 'spans': 'v1.0', - 'temporary_data_references': '2021-10-01-dataplanepreview', - }}, - _PROFILE_TAG + " latest" - ) - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - api_version=None, # type: Optional[str] - base_url="https://management.azure.com", # type: str - profile=KnownProfiles.default, # type: KnownProfiles - **kwargs # type: Any - ): - self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(AzureMachineLearningWorkspaces, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in 
cls.models(api_version).__dict__.items() if isinstance(v, type)} - - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: - - * 1.5.0: :mod:`dataset_dataplane.models` - * 1.0.0: :mod:`model_dataplane.models` - * v1.0: :mod:`registry_discovery.models` - * v1.0: :mod:`runhistory.models` - * 2020-09-01-dataplanepreview: :mod:`v2020_09_01_dataplanepreview.models` - * 2021-10-01: :mod:`v2021_10_01.models` - * 2021-10-01-dataplanepreview: :mod:`v2021_10_01_dataplanepreview.models` - * 2022-01-01-preview: :mod:`v2022_01_01_preview.models` - * 2022-02-01-preview: :mod:`v2022_02_01_preview.models` - * 2022-05-01: :mod:`v2022_05_01.models` - * 2022-06-01-preview: :mod:`v2022_06_01_preview.models` - * 2022-10-01: :mod:`v2022_10_01.models` - * 2022-10-01-preview: :mod:`v2022_10_01_preview.models` - """ - if api_version == '1.5.0': - from .dataset_dataplane import models - return models - elif api_version == '1.0.0': - from .model_dataplane import models - return models - elif api_version == 'v1.0': - from .registry_discovery import models - return models - elif api_version == 'v1.0': - from .runhistory import models - return models - elif api_version == '2020-09-01-dataplanepreview': - from .v2020_09_01_dataplanepreview import models - return models - elif api_version == '2021-10-01': - from .v2021_10_01 import models - return models - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview import models - return models - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview import models - return models - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview import models - return models - elif api_version == '2022-05-01': - from .v2022_05_01 import models - return models - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview import models - return models - elif api_version == '2022-10-01': - from .v2022_10_01 import models - return models - 
elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview import models - return models - raise ValueError("API version {} is not available".format(api_version)) - - @property - def assets(self): - """Instance depends on the API version: - - * 1.0.0: :class:`AssetsOperations` - """ - api_version = self._get_api_version('assets') - if api_version == '1.0.0': - from .model_dataplane.operations import AssetsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'assets'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def async_operations(self): - """Instance depends on the API version: - - * v1.0: :class:`AsyncOperationsOperations` - """ - api_version = self._get_api_version('async_operations') - if api_version == 'v1.0': - from .registry_discovery.operations import AsyncOperationsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'async_operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_deployments(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`BatchDeploymentsOperations` - * 2022-02-01-preview: :class:`BatchDeploymentsOperations` - * 2022-05-01: :class:`BatchDeploymentsOperations` - * 2022-06-01-preview: :class:`BatchDeploymentsOperations` - * 2022-10-01: :class:`BatchDeploymentsOperations` - * 2022-10-01-preview: :class:`BatchDeploymentsOperations` - """ - api_version = self._get_api_version('batch_deployments') - if api_version == '2021-10-01': - from .v2021_10_01.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import BatchDeploymentsOperations as 
OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import BatchDeploymentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_deployments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_endpoints(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`BatchEndpointsOperations` - * 2022-02-01-preview: :class:`BatchEndpointsOperations` - * 2022-05-01: :class:`BatchEndpointsOperations` - * 2022-06-01-preview: :class:`BatchEndpointsOperations` - * 2022-10-01: :class:`BatchEndpointsOperations` - * 2022-10-01-preview: :class:`BatchEndpointsOperations` - """ - api_version = self._get_api_version('batch_endpoints') - if api_version == '2021-10-01': - from .v2021_10_01.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import BatchEndpointsOperations as 
OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_endpoints'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_job_deployment(self): - """Instance depends on the API version: - - * 2020-09-01-dataplanepreview: :class:`BatchJobDeploymentOperations` - """ - api_version = self._get_api_version('batch_job_deployment') - if api_version == '2020-09-01-dataplanepreview': - from .v2020_09_01_dataplanepreview.operations import BatchJobDeploymentOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_job_deployment'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_job_endpoint(self): - """Instance depends on the API version: - - * 2020-09-01-dataplanepreview: :class:`BatchJobEndpointOperations` - """ - api_version = self._get_api_version('batch_job_endpoint') - if api_version == '2020-09-01-dataplanepreview': - from .v2020_09_01_dataplanepreview.operations import BatchJobEndpointOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_job_endpoint'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def code_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`CodeContainersOperations` - * 2021-10-01-dataplanepreview: :class:`CodeContainersOperations` - * 2022-02-01-preview: :class:`CodeContainersOperations` - * 2022-05-01: :class:`CodeContainersOperations` - * 2022-06-01-preview: :class:`CodeContainersOperations` - * 2022-10-01: :class:`CodeContainersOperations` - * 2022-10-01-preview: 
:class:`CodeContainersOperations` - """ - api_version = self._get_api_version('code_containers') - if api_version == '2021-10-01': - from .v2021_10_01.operations import CodeContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import CodeContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'code_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def code_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`CodeVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`CodeVersionsOperations` - * 2022-02-01-preview: :class:`CodeVersionsOperations` - * 2022-05-01: :class:`CodeVersionsOperations` - * 2022-06-01-preview: :class:`CodeVersionsOperations` - * 2022-10-01: :class:`CodeVersionsOperations` - * 2022-10-01-preview: :class:`CodeVersionsOperations` - """ - api_version = self._get_api_version('code_versions') - if api_version == '2021-10-01': - from .v2021_10_01.operations import CodeVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import 
CodeVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import CodeVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'code_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def component_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComponentContainersOperations` - * 2021-10-01-dataplanepreview: :class:`ComponentContainersOperations` - * 2022-02-01-preview: :class:`ComponentContainersOperations` - * 2022-05-01: :class:`ComponentContainersOperations` - * 2022-06-01-preview: :class:`ComponentContainersOperations` - * 2022-10-01: :class:`ComponentContainersOperations` - * 2022-10-01-preview: :class:`ComponentContainersOperations` - """ - api_version = self._get_api_version('component_containers') - if api_version == '2021-10-01': - from .v2021_10_01.operations import ComponentContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import 
ComponentContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import ComponentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'component_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def component_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComponentVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`ComponentVersionsOperations` - * 2022-02-01-preview: :class:`ComponentVersionsOperations` - * 2022-05-01: :class:`ComponentVersionsOperations` - * 2022-06-01-preview: :class:`ComponentVersionsOperations` - * 2022-10-01: :class:`ComponentVersionsOperations` - * 2022-10-01-preview: :class:`ComponentVersionsOperations` - """ - api_version = self._get_api_version('component_versions') - if api_version == '2021-10-01': - from .v2021_10_01.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from 
.v2022_10_01.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import ComponentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'component_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def compute(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComputeOperations` - * 2022-01-01-preview: :class:`ComputeOperations` - * 2022-05-01: :class:`ComputeOperations` - * 2022-10-01: :class:`ComputeOperations` - * 2022-10-01-preview: :class:`ComputeOperations` - """ - api_version = self._get_api_version('compute') - if api_version == '2021-10-01': - from .v2021_10_01.operations import ComputeOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import ComputeOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import ComputeOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import ComputeOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import ComputeOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'compute'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_call(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DataCallOperations` - """ - api_version = self._get_api_version('data_call') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DataCallOperations as OperationClass - else: - raise ValueError("API version {} does 
not have operation group 'data_call'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_container(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DataContainerOperations` - """ - api_version = self._get_api_version('data_container') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DataContainerOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_container'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_containers(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataContainersOperations` - * 2022-02-01-preview: :class:`DataContainersOperations` - * 2022-05-01: :class:`DataContainersOperations` - * 2022-06-01-preview: :class:`DataContainersOperations` - * 2022-10-01: :class:`DataContainersOperations` - * 2022-10-01-preview: :class:`DataContainersOperations` - """ - api_version = self._get_api_version('data_containers') - if api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import DataContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import DataContainersOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import DataContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import DataContainersOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import DataContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import 
DataContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_references(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataReferencesOperations` - """ - api_version = self._get_api_version('data_references') - if api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import DataReferencesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_references'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_version(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DataVersionOperations` - """ - api_version = self._get_api_version('data_version') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DataVersionOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_version'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_versions(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataVersionsOperations` - * 2022-02-01-preview: :class:`DataVersionsOperations` - * 2022-05-01: :class:`DataVersionsOperations` - * 2022-06-01-preview: :class:`DataVersionsOperations` - * 2022-10-01: :class:`DataVersionsOperations` - * 2022-10-01-preview: :class:`DataVersionsOperations` - """ - api_version = self._get_api_version('data_versions') - if api_version == '2021-10-01-dataplanepreview': - 
from .v2021_10_01_dataplanepreview.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import DataVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatasetContainersOperations` - """ - api_version = self._get_api_version('dataset_containers') - if api_version == '2021-10-01': - from .v2021_10_01.operations import DatasetContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_controller_v2(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DatasetControllerV2Operations` - """ - api_version = self._get_api_version('dataset_controller_v2') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DatasetControllerV2Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 
'dataset_controller_v2'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_v2(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DatasetV2Operations` - """ - api_version = self._get_api_version('dataset_v2') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DatasetV2Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_v2'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatasetVersionsOperations` - """ - api_version = self._get_api_version('dataset_versions') - if api_version == '2021-10-01': - from .v2021_10_01.operations import DatasetVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def datasets_v1(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DatasetsV1Operations` - """ - api_version = self._get_api_version('datasets_v1') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DatasetsV1Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'datasets_v1'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def datastores(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatastoresOperations` - * 
2022-02-01-preview: :class:`DatastoresOperations` - * 2022-05-01: :class:`DatastoresOperations` - * 2022-06-01-preview: :class:`DatastoresOperations` - * 2022-10-01: :class:`DatastoresOperations` - * 2022-10-01-preview: :class:`DatastoresOperations` - """ - api_version = self._get_api_version('datastores') - if api_version == '2021-10-01': - from .v2021_10_01.operations import DatastoresOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import DatastoresOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import DatastoresOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import DatastoresOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import DatastoresOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import DatastoresOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'datastores'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def delete(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DeleteOperations` - * v1.0: :class:`DeleteOperations` - """ - api_version = self._get_api_version('delete') - if api_version == '1.5.0': - from .dataset_dataplane.operations import DeleteOperations as OperationClass - elif api_version == 'v1.0': - from .runhistory.operations import DeleteOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'delete'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def environment_containers(self): - 
"""Instance depends on the API version: - - * 2021-10-01: :class:`EnvironmentContainersOperations` - * 2021-10-01-dataplanepreview: :class:`EnvironmentContainersOperations` - * 2022-02-01-preview: :class:`EnvironmentContainersOperations` - * 2022-05-01: :class:`EnvironmentContainersOperations` - * 2022-06-01-preview: :class:`EnvironmentContainersOperations` - * 2022-10-01: :class:`EnvironmentContainersOperations` - * 2022-10-01-preview: :class:`EnvironmentContainersOperations` - """ - api_version = self._get_api_version('environment_containers') - if api_version == '2021-10-01': - from .v2021_10_01.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import EnvironmentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'environment_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def environment_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`EnvironmentVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`EnvironmentVersionsOperations` - * 
2022-02-01-preview: :class:`EnvironmentVersionsOperations` - * 2022-05-01: :class:`EnvironmentVersionsOperations` - * 2022-06-01-preview: :class:`EnvironmentVersionsOperations` - * 2022-10-01: :class:`EnvironmentVersionsOperations` - * 2022-10-01-preview: :class:`EnvironmentVersionsOperations` - """ - api_version = self._get_api_version('environment_versions') - if api_version == '2021-10-01': - from .v2021_10_01.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import EnvironmentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'environment_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def events(self): - """Instance depends on the API version: - - * v1.0: :class:`EventsOperations` - """ - api_version = self._get_api_version('events') - if api_version == 'v1.0': - from .runhistory.operations import EventsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'events'".format(api_version)) - return OperationClass(self._client, self._config, 
Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def experiments(self): - """Instance depends on the API version: - - * v1.0: :class:`ExperimentsOperations` - """ - api_version = self._get_api_version('experiments') - if api_version == 'v1.0': - from .runhistory.operations import ExperimentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'experiments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def extensive_model(self): - """Instance depends on the API version: - - * 1.0.0: :class:`ExtensiveModelOperations` - """ - api_version = self._get_api_version('extensive_model') - if api_version == '1.0.0': - from .model_dataplane.operations import ExtensiveModelOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'extensive_model'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def get_operation_status(self): - """Instance depends on the API version: - - * 1.5.0: :class:`GetOperationStatusOperations` - """ - api_version = self._get_api_version('get_operation_status') - if api_version == '1.5.0': - from .dataset_dataplane.operations import GetOperationStatusOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'get_operation_status'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def jobs(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`JobsOperations` - * 2022-02-01-preview: :class:`JobsOperations` - * 2022-05-01: :class:`JobsOperations` - * 
2022-06-01-preview: :class:`JobsOperations` - * 2022-10-01: :class:`JobsOperations` - * 2022-10-01-preview: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - if api_version == '2021-10-01': - from .v2021_10_01.operations import JobsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import JobsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import JobsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import JobsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import JobsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def labeling_jobs(self): - """Instance depends on the API version: - - * 2022-06-01-preview: :class:`LabelingJobsOperations` - * 2022-10-01-preview: :class:`LabelingJobsOperations` - """ - api_version = self._get_api_version('labeling_jobs') - if api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import LabelingJobsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import LabelingJobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'labeling_jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def metric(self): - """Instance depends on the API version: - - * v1.0: 
:class:`MetricOperations` - """ - api_version = self._get_api_version('metric') - if api_version == 'v1.0': - from .runhistory.operations import MetricOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'metric'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def migration(self): - """Instance depends on the API version: - - * 1.0.0: :class:`MigrationOperations` - """ - api_version = self._get_api_version('migration') - if api_version == '1.0.0': - from .model_dataplane.operations import MigrationOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'migration'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def model_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ModelContainersOperations` - * 2021-10-01-dataplanepreview: :class:`ModelContainersOperations` - * 2022-02-01-preview: :class:`ModelContainersOperations` - * 2022-05-01: :class:`ModelContainersOperations` - * 2022-06-01-preview: :class:`ModelContainersOperations` - * 2022-10-01: :class:`ModelContainersOperations` - * 2022-10-01-preview: :class:`ModelContainersOperations` - """ - api_version = self._get_api_version('model_containers') - if api_version == '2021-10-01': - from .v2021_10_01.operations import ModelContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import 
ModelContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import ModelContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'model_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def model_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ModelVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`ModelVersionsOperations` - * 2022-02-01-preview: :class:`ModelVersionsOperations` - * 2022-05-01: :class:`ModelVersionsOperations` - * 2022-06-01-preview: :class:`ModelVersionsOperations` - * 2022-10-01: :class:`ModelVersionsOperations` - * 2022-10-01-preview: :class:`ModelVersionsOperations` - """ - api_version = self._get_api_version('model_versions') - if api_version == '2021-10-01': - from .v2021_10_01.operations import ModelVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import ModelVersionsOperations as OperationClass - elif api_version == 
'2022-10-01-preview': - from .v2022_10_01_preview.operations import ModelVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'model_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def models(self): - """Instance depends on the API version: - - * 1.0.0: :class:`ModelsOperations` - """ - api_version = self._get_api_version('models') - if api_version == '1.0.0': - from .model_dataplane.operations import ModelsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'models'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def online_deployments(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`OnlineDeploymentsOperations` - * 2022-02-01-preview: :class:`OnlineDeploymentsOperations` - * 2022-05-01: :class:`OnlineDeploymentsOperations` - * 2022-06-01-preview: :class:`OnlineDeploymentsOperations` - * 2022-10-01: :class:`OnlineDeploymentsOperations` - * 2022-10-01-preview: :class:`OnlineDeploymentsOperations` - """ - api_version = self._get_api_version('online_deployments') - if api_version == '2021-10-01': - from .v2021_10_01.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import OnlineDeploymentsOperations as 
OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import OnlineDeploymentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'online_deployments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def online_endpoints(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`OnlineEndpointsOperations` - * 2022-02-01-preview: :class:`OnlineEndpointsOperations` - * 2022-05-01: :class:`OnlineEndpointsOperations` - * 2022-06-01-preview: :class:`OnlineEndpointsOperations` - * 2022-10-01: :class:`OnlineEndpointsOperations` - * 2022-10-01-preview: :class:`OnlineEndpointsOperations` - """ - api_version = self._get_api_version('online_endpoints') - if api_version == '2021-10-01': - from .v2021_10_01.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from .v2022_02_01_preview.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import OnlineEndpointsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'online_endpoints'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2021-10-01: 
:class:`Operations` - * 2022-01-01-preview: :class:`Operations` - * 2022-05-01: :class:`Operations` - * 2022-10-01: :class:`Operations` - * 2022-10-01-preview: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2021-10-01': - from .v2021_10_01.operations import Operations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import Operations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import Operations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import Operations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def private_endpoint_connections(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`PrivateEndpointConnectionsOperations` - * 2022-01-01-preview: :class:`PrivateEndpointConnectionsOperations` - * 2022-05-01: :class:`PrivateEndpointConnectionsOperations` - * 2022-10-01: :class:`PrivateEndpointConnectionsOperations` - * 2022-10-01-preview: :class:`PrivateEndpointConnectionsOperations` - """ - api_version = self._get_api_version('private_endpoint_connections') - if api_version == '2021-10-01': - from .v2021_10_01.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-10-01': - from 
.v2022_10_01.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import PrivateEndpointConnectionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def private_link_resources(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`PrivateLinkResourcesOperations` - * 2022-01-01-preview: :class:`PrivateLinkResourcesOperations` - * 2022-05-01: :class:`PrivateLinkResourcesOperations` - * 2022-10-01: :class:`PrivateLinkResourcesOperations` - * 2022-10-01-preview: :class:`PrivateLinkResourcesOperations` - """ - api_version = self._get_api_version('private_link_resources') - if api_version == '2021-10-01': - from .v2021_10_01.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import PrivateLinkResourcesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def quotas(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`QuotasOperations` - * 
2022-01-01-preview: :class:`QuotasOperations` - * 2022-05-01: :class:`QuotasOperations` - * 2022-10-01: :class:`QuotasOperations` - * 2022-10-01-preview: :class:`QuotasOperations` - """ - api_version = self._get_api_version('quotas') - if api_version == '2021-10-01': - from .v2021_10_01.operations import QuotasOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import QuotasOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import QuotasOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import QuotasOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import QuotasOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'quotas'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registries(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistriesOperations` - """ - api_version = self._get_api_version('registries') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistriesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registries'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_code_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryCodeContainersOperations` - """ - api_version = self._get_api_version('registry_code_containers') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryCodeContainersOperations as OperationClass 
- else: - raise ValueError("API version {} does not have operation group 'registry_code_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_code_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryCodeVersionsOperations` - """ - api_version = self._get_api_version('registry_code_versions') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryCodeVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_code_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_component_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryComponentContainersOperations` - """ - api_version = self._get_api_version('registry_component_containers') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryComponentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_component_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_component_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryComponentVersionsOperations` - """ - api_version = self._get_api_version('registry_component_versions') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryComponentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have 
operation group 'registry_component_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_environment_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryEnvironmentContainersOperations` - """ - api_version = self._get_api_version('registry_environment_containers') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryEnvironmentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_environment_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_environment_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryEnvironmentVersionsOperations` - """ - api_version = self._get_api_version('registry_environment_versions') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryEnvironmentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_environment_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_management_non_workspace(self): - """Instance depends on the API version: - - * v1.0: :class:`RegistryManagementNonWorkspaceOperations` - """ - api_version = self._get_api_version('registry_management_non_workspace') - if api_version == 'v1.0': - from .registry_discovery.operations import RegistryManagementNonWorkspaceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 
'registry_management_non_workspace'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_model_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryModelContainersOperations` - """ - api_version = self._get_api_version('registry_model_containers') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryModelContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_model_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_model_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryModelVersionsOperations` - """ - api_version = self._get_api_version('registry_model_versions') - if api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import RegistryModelVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_model_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def resource_management_asset_reference(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`ResourceManagementAssetReferenceOperations` - """ - api_version = self._get_api_version('resource_management_asset_reference') - if api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import ResourceManagementAssetReferenceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 
'resource_management_asset_reference'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def run(self): - """Instance depends on the API version: - - * v1.0: :class:`RunOperations` - """ - api_version = self._get_api_version('run') - if api_version == 'v1.0': - from .runhistory.operations import RunOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'run'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def run_artifacts(self): - """Instance depends on the API version: - - * v1.0: :class:`RunArtifactsOperations` - """ - api_version = self._get_api_version('run_artifacts') - if api_version == 'v1.0': - from .runhistory.operations import RunArtifactsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'run_artifacts'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def runs(self): - """Instance depends on the API version: - - * v1.0: :class:`RunsOperations` - """ - api_version = self._get_api_version('runs') - if api_version == 'v1.0': - from .runhistory.operations import RunsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'runs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def schedules(self): - """Instance depends on the API version: - - * 2022-06-01-preview: :class:`SchedulesOperations` - * 2022-10-01: :class:`SchedulesOperations` - * 2022-10-01-preview: :class:`SchedulesOperations` - """ - 
api_version = self._get_api_version('schedules') - if api_version == '2022-06-01-preview': - from .v2022_06_01_preview.operations import SchedulesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import SchedulesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import SchedulesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'schedules'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def spans(self): - """Instance depends on the API version: - - * v1.0: :class:`SpansOperations` - """ - api_version = self._get_api_version('spans') - if api_version == 'v1.0': - from .runhistory.operations import SpansOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'spans'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def temporary_data_references(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`TemporaryDataReferencesOperations` - """ - api_version = self._get_api_version('temporary_data_references') - if api_version == '2021-10-01-dataplanepreview': - from .v2021_10_01_dataplanepreview.operations import TemporaryDataReferencesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'temporary_data_references'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def usages(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`UsagesOperations` - * 2022-01-01-preview: 
:class:`UsagesOperations` - * 2022-05-01: :class:`UsagesOperations` - * 2022-10-01: :class:`UsagesOperations` - * 2022-10-01-preview: :class:`UsagesOperations` - """ - api_version = self._get_api_version('usages') - if api_version == '2021-10-01': - from .v2021_10_01.operations import UsagesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import UsagesOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import UsagesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import UsagesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import UsagesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'usages'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def virtual_machine_sizes(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`VirtualMachineSizesOperations` - * 2022-01-01-preview: :class:`VirtualMachineSizesOperations` - * 2022-05-01: :class:`VirtualMachineSizesOperations` - * 2022-10-01: :class:`VirtualMachineSizesOperations` - * 2022-10-01-preview: :class:`VirtualMachineSizesOperations` - """ - api_version = self._get_api_version('virtual_machine_sizes') - if api_version == '2021-10-01': - from .v2021_10_01.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import VirtualMachineSizesOperations as OperationClass - elif 
api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import VirtualMachineSizesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'virtual_machine_sizes'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspace_connections(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspaceConnectionsOperations` - * 2022-01-01-preview: :class:`WorkspaceConnectionsOperations` - * 2022-05-01: :class:`WorkspaceConnectionsOperations` - * 2022-10-01: :class:`WorkspaceConnectionsOperations` - * 2022-10-01-preview: :class:`WorkspaceConnectionsOperations` - """ - api_version = self._get_api_version('workspace_connections') - if api_version == '2021-10-01': - from .v2021_10_01.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import WorkspaceConnectionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspace_connections'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspace_features(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspaceFeaturesOperations` - * 2022-01-01-preview: :class:`WorkspaceFeaturesOperations` - * 2022-05-01: 
:class:`WorkspaceFeaturesOperations` - * 2022-10-01: :class:`WorkspaceFeaturesOperations` - * 2022-10-01-preview: :class:`WorkspaceFeaturesOperations` - """ - api_version = self._get_api_version('workspace_features') - if api_version == '2021-10-01': - from .v2021_10_01.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from .v2022_10_01_preview.operations import WorkspaceFeaturesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspace_features'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspaces(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspacesOperations` - * 2022-01-01-preview: :class:`WorkspacesOperations` - * 2022-05-01: :class:`WorkspacesOperations` - * 2022-10-01: :class:`WorkspacesOperations` - * 2022-10-01-preview: :class:`WorkspacesOperations` - """ - api_version = self._get_api_version('workspaces') - if api_version == '2021-10-01': - from .v2021_10_01.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from .v2022_01_01_preview.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-05-01': - from .v2022_05_01.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-10-01': - from .v2022_10_01.operations import WorkspacesOperations as OperationClass - elif api_version == 
'2022-10-01-preview': - from .v2022_10_01_preview.operations import WorkspacesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspaces'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - def close(self): - self._client.close() - def __enter__(self): - self._client.__enter__() - return self - def __exit__(self, *exc_details): - self._client.__exit__(*exc_details) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_configuration.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_configuration.py deleted file mode 100644 index 1aa9c858b5b8..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_configuration.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy - -from ._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - - from azure.core.credentials import TokenCredential - -class AzureMachineLearningWorkspacesConfiguration(Configuration): - """Configuration for AzureMachineLearningWorkspaces. - - Note that all parameters used to create this instance are saved as instance - attributes. 
- - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. - :type subscription_id: str - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-machinelearningservices/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) 
-> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_version.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_version.py deleted file mode 100644 index a30a458f8b5b..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/_version.py +++ /dev/null @@ -1,8 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -VERSION = "0.1.0" \ No newline at end of file diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/__init__.py deleted file mode 100644 index 872474577c4f..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces -__all__ = ['AzureMachineLearningWorkspaces'] diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_azure_machine_learning_workspaces.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_azure_machine_learning_workspaces.py deleted file mode 100644 index e8ec4203da4b..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_azure_machine_learning_workspaces.py +++ /dev/null @@ -1,1421 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from msrest import Deserializer, Serializer - -from azure.mgmt.core import AsyncARMPipelineClient -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin - -from ._configuration import AzureMachineLearningWorkspacesConfiguration - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials import TokenCredential - from azure.core.credentials_async import AsyncTokenCredential - -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass - -class AzureMachineLearningWorkspaces(MultiApiClientMixin, _SDKClient): - """These APIs allow end users to operate on Azure Machine Learning Workspace resources. - - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. - For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. - :type subscription_id: str - :param api_version: API version to use if no profile is provided, or if missing in profile. - :type api_version: str - :param base_url: Service URL - :type base_url: str - :param profile: A profile definition, from KnownProfiles to dict. 
- :type profile: azure.profiles.KnownProfiles - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - DEFAULT_API_VERSION = '2022-10-01' - _PROFILE_TAG = "azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - 'assets': '1.0.0', - 'async_operations': 'v1.0', - 'batch_job_deployment': '2020-09-01-dataplanepreview', - 'batch_job_endpoint': '2020-09-01-dataplanepreview', - 'data_call': '1.5.0', - 'data_container': '1.5.0', - 'data_references': '2021-10-01-dataplanepreview', - 'data_version': '1.5.0', - 'dataset_containers': '2021-10-01', - 'dataset_controller_v2': '1.5.0', - 'dataset_v2': '1.5.0', - 'dataset_versions': '2021-10-01', - 'datasets_v1': '1.5.0', - 'delete': 'v1.0', - 'events': 'v1.0', - 'experiments': 'v1.0', - 'extensive_model': '1.0.0', - 'get_operation_status': '1.5.0', - 'metric': 'v1.0', - 'migration': '1.0.0', - 'models': '1.0.0', - 'registry_management_non_workspace': 'v1.0', - 'resource_management_asset_reference': '2021-10-01-dataplanepreview', - 'run': 'v1.0', - 'run_artifacts': 'v1.0', - 'runs': 'v1.0', - 'spans': 'v1.0', - 'temporary_data_references': '2021-10-01-dataplanepreview', - }}, - _PROFILE_TAG + " latest" - ) - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - api_version: Optional[str] = None, - base_url: str = "https://management.azure.com", - profile: KnownProfiles = KnownProfiles.default, - **kwargs # type: Any - ) -> None: - self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(AzureMachineLearningWorkspaces, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in 
cls.models(api_version).__dict__.items() if isinstance(v, type)} - - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: - - * 1.5.0: :mod:`dataset_dataplane.models` - * 1.0.0: :mod:`model_dataplane.models` - * v1.0: :mod:`registry_discovery.models` - * v1.0: :mod:`runhistory.models` - * 2020-09-01-dataplanepreview: :mod:`v2020_09_01_dataplanepreview.models` - * 2021-10-01: :mod:`v2021_10_01.models` - * 2021-10-01-dataplanepreview: :mod:`v2021_10_01_dataplanepreview.models` - * 2022-01-01-preview: :mod:`v2022_01_01_preview.models` - * 2022-02-01-preview: :mod:`v2022_02_01_preview.models` - * 2022-05-01: :mod:`v2022_05_01.models` - * 2022-06-01-preview: :mod:`v2022_06_01_preview.models` - * 2022-10-01: :mod:`v2022_10_01.models` - * 2022-10-01-preview: :mod:`v2022_10_01_preview.models` - """ - if api_version == '1.5.0': - from ..dataset_dataplane import models - return models - elif api_version == '1.0.0': - from ..model_dataplane import models - return models - elif api_version == 'v1.0': - from ..registry_discovery import models - return models - elif api_version == 'v1.0': - from ..runhistory import models - return models - elif api_version == '2020-09-01-dataplanepreview': - from ..v2020_09_01_dataplanepreview import models - return models - elif api_version == '2021-10-01': - from ..v2021_10_01 import models - return models - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview import models - return models - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview import models - return models - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview import models - return models - elif api_version == '2022-05-01': - from ..v2022_05_01 import models - return models - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview import models - return models - elif api_version == '2022-10-01': - from ..v2022_10_01 import models - return 
models - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview import models - return models - raise ValueError("API version {} is not available".format(api_version)) - - @property - def assets(self): - """Instance depends on the API version: - - * 1.0.0: :class:`AssetsOperations` - """ - api_version = self._get_api_version('assets') - if api_version == '1.0.0': - from ..model_dataplane.aio.operations import AssetsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'assets'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def async_operations(self): - """Instance depends on the API version: - - * v1.0: :class:`AsyncOperationsOperations` - """ - api_version = self._get_api_version('async_operations') - if api_version == 'v1.0': - from ..registry_discovery.aio.operations import AsyncOperationsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'async_operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_deployments(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`BatchDeploymentsOperations` - * 2022-02-01-preview: :class:`BatchDeploymentsOperations` - * 2022-05-01: :class:`BatchDeploymentsOperations` - * 2022-06-01-preview: :class:`BatchDeploymentsOperations` - * 2022-10-01: :class:`BatchDeploymentsOperations` - * 2022-10-01-preview: :class:`BatchDeploymentsOperations` - """ - api_version = self._get_api_version('batch_deployments') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import 
BatchDeploymentsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import BatchDeploymentsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import BatchDeploymentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_deployments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_endpoints(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`BatchEndpointsOperations` - * 2022-02-01-preview: :class:`BatchEndpointsOperations` - * 2022-05-01: :class:`BatchEndpointsOperations` - * 2022-06-01-preview: :class:`BatchEndpointsOperations` - * 2022-10-01: :class:`BatchEndpointsOperations` - * 2022-10-01-preview: :class:`BatchEndpointsOperations` - """ - api_version = self._get_api_version('batch_endpoints') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import BatchEndpointsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from 
..v2022_10_01_preview.aio.operations import BatchEndpointsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_endpoints'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_job_deployment(self): - """Instance depends on the API version: - - * 2020-09-01-dataplanepreview: :class:`BatchJobDeploymentOperations` - """ - api_version = self._get_api_version('batch_job_deployment') - if api_version == '2020-09-01-dataplanepreview': - from ..v2020_09_01_dataplanepreview.aio.operations import BatchJobDeploymentOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_job_deployment'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def batch_job_endpoint(self): - """Instance depends on the API version: - - * 2020-09-01-dataplanepreview: :class:`BatchJobEndpointOperations` - """ - api_version = self._get_api_version('batch_job_endpoint') - if api_version == '2020-09-01-dataplanepreview': - from ..v2020_09_01_dataplanepreview.aio.operations import BatchJobEndpointOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'batch_job_endpoint'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def code_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`CodeContainersOperations` - * 2021-10-01-dataplanepreview: :class:`CodeContainersOperations` - * 2022-02-01-preview: :class:`CodeContainersOperations` - * 2022-05-01: :class:`CodeContainersOperations` - * 2022-06-01-preview: :class:`CodeContainersOperations` 
- * 2022-10-01: :class:`CodeContainersOperations` - * 2022-10-01-preview: :class:`CodeContainersOperations` - """ - api_version = self._get_api_version('code_containers') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import CodeContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import CodeContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'code_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def code_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`CodeVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`CodeVersionsOperations` - * 2022-02-01-preview: :class:`CodeVersionsOperations` - * 2022-05-01: :class:`CodeVersionsOperations` - * 2022-06-01-preview: :class:`CodeVersionsOperations` - * 2022-10-01: :class:`CodeVersionsOperations` - * 2022-10-01-preview: :class:`CodeVersionsOperations` - """ - api_version = self._get_api_version('code_versions') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import CodeVersionsOperations as 
OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import CodeVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import CodeVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'code_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def component_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComponentContainersOperations` - * 2021-10-01-dataplanepreview: :class:`ComponentContainersOperations` - * 2022-02-01-preview: :class:`ComponentContainersOperations` - * 2022-05-01: :class:`ComponentContainersOperations` - * 2022-06-01-preview: :class:`ComponentContainersOperations` - * 2022-10-01: :class:`ComponentContainersOperations` - * 2022-10-01-preview: :class:`ComponentContainersOperations` - """ - api_version = self._get_api_version('component_containers') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from 
..v2022_02_01_preview.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import ComponentContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import ComponentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'component_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def component_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComponentVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`ComponentVersionsOperations` - * 2022-02-01-preview: :class:`ComponentVersionsOperations` - * 2022-05-01: :class:`ComponentVersionsOperations` - * 2022-06-01-preview: :class:`ComponentVersionsOperations` - * 2022-10-01: :class:`ComponentVersionsOperations` - * 2022-10-01-preview: :class:`ComponentVersionsOperations` - """ - api_version = self._get_api_version('component_versions') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import ComponentVersionsOperations as 
OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import ComponentVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import ComponentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'component_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def compute(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ComputeOperations` - * 2022-01-01-preview: :class:`ComputeOperations` - * 2022-05-01: :class:`ComputeOperations` - * 2022-10-01: :class:`ComputeOperations` - * 2022-10-01-preview: :class:`ComputeOperations` - """ - api_version = self._get_api_version('compute') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import ComputeOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import ComputeOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import ComputeOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import ComputeOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import ComputeOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'compute'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_call(self): - """Instance depends on the API version: - - * 1.5.0: 
:class:`DataCallOperations` - """ - api_version = self._get_api_version('data_call') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DataCallOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_call'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_container(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DataContainerOperations` - """ - api_version = self._get_api_version('data_container') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DataContainerOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_container'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_containers(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataContainersOperations` - * 2022-02-01-preview: :class:`DataContainersOperations` - * 2022-05-01: :class:`DataContainersOperations` - * 2022-06-01-preview: :class:`DataContainersOperations` - * 2022-10-01: :class:`DataContainersOperations` - * 2022-10-01-preview: :class:`DataContainersOperations` - """ - api_version = self._get_api_version('data_containers') - if api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import DataContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import DataContainersOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import DataContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from 
..v2022_06_01_preview.aio.operations import DataContainersOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import DataContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import DataContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_references(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataReferencesOperations` - """ - api_version = self._get_api_version('data_references') - if api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import DataReferencesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_references'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_version(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DataVersionOperations` - """ - api_version = self._get_api_version('data_version') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DataVersionOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_version'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def data_versions(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`DataVersionsOperations` - * 2022-02-01-preview: :class:`DataVersionsOperations` - * 
2022-05-01: :class:`DataVersionsOperations` - * 2022-06-01-preview: :class:`DataVersionsOperations` - * 2022-10-01: :class:`DataVersionsOperations` - * 2022-10-01-preview: :class:`DataVersionsOperations` - """ - api_version = self._get_api_version('data_versions') - if api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import DataVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import DataVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'data_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatasetContainersOperations` - """ - api_version = self._get_api_version('dataset_containers') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import DatasetContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_controller_v2(self): - """Instance depends on the API version: 
- - * 1.5.0: :class:`DatasetControllerV2Operations` - """ - api_version = self._get_api_version('dataset_controller_v2') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DatasetControllerV2Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_controller_v2'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_v2(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DatasetV2Operations` - """ - api_version = self._get_api_version('dataset_v2') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DatasetV2Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_v2'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def dataset_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatasetVersionsOperations` - """ - api_version = self._get_api_version('dataset_versions') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import DatasetVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'dataset_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def datasets_v1(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DatasetsV1Operations` - """ - api_version = self._get_api_version('datasets_v1') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DatasetsV1Operations as OperationClass - else: - raise ValueError("API version {} does not have 
operation group 'datasets_v1'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def datastores(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`DatastoresOperations` - * 2022-02-01-preview: :class:`DatastoresOperations` - * 2022-05-01: :class:`DatastoresOperations` - * 2022-06-01-preview: :class:`DatastoresOperations` - * 2022-10-01: :class:`DatastoresOperations` - * 2022-10-01-preview: :class:`DatastoresOperations` - """ - api_version = self._get_api_version('datastores') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import DatastoresOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import DatastoresOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import DatastoresOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import DatastoresOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import DatastoresOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import DatastoresOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'datastores'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def delete(self): - """Instance depends on the API version: - - * 1.5.0: :class:`DeleteOperations` - * v1.0: :class:`DeleteOperations` - """ - api_version = self._get_api_version('delete') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import DeleteOperations as OperationClass - elif api_version == 'v1.0': - from 
..runhistory.aio.operations import DeleteOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'delete'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def environment_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`EnvironmentContainersOperations` - * 2021-10-01-dataplanepreview: :class:`EnvironmentContainersOperations` - * 2022-02-01-preview: :class:`EnvironmentContainersOperations` - * 2022-05-01: :class:`EnvironmentContainersOperations` - * 2022-06-01-preview: :class:`EnvironmentContainersOperations` - * 2022-10-01: :class:`EnvironmentContainersOperations` - * 2022-10-01-preview: :class:`EnvironmentContainersOperations` - """ - api_version = self._get_api_version('environment_containers') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import EnvironmentContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import EnvironmentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 
'environment_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def environment_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`EnvironmentVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`EnvironmentVersionsOperations` - * 2022-02-01-preview: :class:`EnvironmentVersionsOperations` - * 2022-05-01: :class:`EnvironmentVersionsOperations` - * 2022-06-01-preview: :class:`EnvironmentVersionsOperations` - * 2022-10-01: :class:`EnvironmentVersionsOperations` - * 2022-10-01-preview: :class:`EnvironmentVersionsOperations` - """ - api_version = self._get_api_version('environment_versions') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import EnvironmentVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import EnvironmentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'environment_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), 
Deserializer(self._models_dict(api_version))) - - @property - def events(self): - """Instance depends on the API version: - - * v1.0: :class:`EventsOperations` - """ - api_version = self._get_api_version('events') - if api_version == 'v1.0': - from ..runhistory.aio.operations import EventsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'events'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def experiments(self): - """Instance depends on the API version: - - * v1.0: :class:`ExperimentsOperations` - """ - api_version = self._get_api_version('experiments') - if api_version == 'v1.0': - from ..runhistory.aio.operations import ExperimentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'experiments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def extensive_model(self): - """Instance depends on the API version: - - * 1.0.0: :class:`ExtensiveModelOperations` - """ - api_version = self._get_api_version('extensive_model') - if api_version == '1.0.0': - from ..model_dataplane.aio.operations import ExtensiveModelOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'extensive_model'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def get_operation_status(self): - """Instance depends on the API version: - - * 1.5.0: :class:`GetOperationStatusOperations` - """ - api_version = self._get_api_version('get_operation_status') - if api_version == '1.5.0': - from ..dataset_dataplane.aio.operations import GetOperationStatusOperations as 
OperationClass - else: - raise ValueError("API version {} does not have operation group 'get_operation_status'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def jobs(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`JobsOperations` - * 2022-02-01-preview: :class:`JobsOperations` - * 2022-05-01: :class:`JobsOperations` - * 2022-06-01-preview: :class:`JobsOperations` - * 2022-10-01: :class:`JobsOperations` - * 2022-10-01-preview: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import JobsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import JobsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def labeling_jobs(self): - """Instance depends on the API version: - - * 2022-06-01-preview: :class:`LabelingJobsOperations` - * 2022-10-01-preview: :class:`LabelingJobsOperations` - """ - api_version = self._get_api_version('labeling_jobs') - if api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import 
LabelingJobsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import LabelingJobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'labeling_jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def metric(self): - """Instance depends on the API version: - - * v1.0: :class:`MetricOperations` - """ - api_version = self._get_api_version('metric') - if api_version == 'v1.0': - from ..runhistory.aio.operations import MetricOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'metric'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def migration(self): - """Instance depends on the API version: - - * 1.0.0: :class:`MigrationOperations` - """ - api_version = self._get_api_version('migration') - if api_version == '1.0.0': - from ..model_dataplane.aio.operations import MigrationOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'migration'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def model_containers(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ModelContainersOperations` - * 2021-10-01-dataplanepreview: :class:`ModelContainersOperations` - * 2022-02-01-preview: :class:`ModelContainersOperations` - * 2022-05-01: :class:`ModelContainersOperations` - * 2022-06-01-preview: :class:`ModelContainersOperations` - * 2022-10-01: :class:`ModelContainersOperations` - * 2022-10-01-preview: :class:`ModelContainersOperations` - """ - api_version = 
self._get_api_version('model_containers') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import ModelContainersOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import ModelContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'model_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def model_versions(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`ModelVersionsOperations` - * 2021-10-01-dataplanepreview: :class:`ModelVersionsOperations` - * 2022-02-01-preview: :class:`ModelVersionsOperations` - * 2022-05-01: :class:`ModelVersionsOperations` - * 2022-06-01-preview: :class:`ModelVersionsOperations` - * 2022-10-01: :class:`ModelVersionsOperations` - * 2022-10-01-preview: :class:`ModelVersionsOperations` - """ - api_version = self._get_api_version('model_versions') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import ModelVersionsOperations as OperationClass - elif api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations 
import ModelVersionsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import ModelVersionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import ModelVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'model_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def models(self): - """Instance depends on the API version: - - * 1.0.0: :class:`ModelsOperations` - """ - api_version = self._get_api_version('models') - if api_version == '1.0.0': - from ..model_dataplane.aio.operations import ModelsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'models'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def online_deployments(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`OnlineDeploymentsOperations` - * 2022-02-01-preview: :class:`OnlineDeploymentsOperations` - * 2022-05-01: :class:`OnlineDeploymentsOperations` - * 2022-06-01-preview: :class:`OnlineDeploymentsOperations` - * 2022-10-01: :class:`OnlineDeploymentsOperations` - * 2022-10-01-preview: :class:`OnlineDeploymentsOperations` - """ - api_version = self._get_api_version('online_deployments') - if api_version == 
'2021-10-01': - from ..v2021_10_01.aio.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import OnlineDeploymentsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import OnlineDeploymentsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'online_deployments'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def online_endpoints(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`OnlineEndpointsOperations` - * 2022-02-01-preview: :class:`OnlineEndpointsOperations` - * 2022-05-01: :class:`OnlineEndpointsOperations` - * 2022-06-01-preview: :class:`OnlineEndpointsOperations` - * 2022-10-01: :class:`OnlineEndpointsOperations` - * 2022-10-01-preview: :class:`OnlineEndpointsOperations` - """ - api_version = self._get_api_version('online_endpoints') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-02-01-preview': - from ..v2022_02_01_preview.aio.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import 
OnlineEndpointsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import OnlineEndpointsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import OnlineEndpointsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'online_endpoints'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`Operations` - * 2022-01-01-preview: :class:`Operations` - * 2022-05-01: :class:`Operations` - * 2022-10-01: :class:`Operations` - * 2022-10-01-preview: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import Operations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import Operations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import Operations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import Operations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def private_endpoint_connections(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`PrivateEndpointConnectionsOperations` - * 2022-01-01-preview: :class:`PrivateEndpointConnectionsOperations` - * 2022-05-01: 
:class:`PrivateEndpointConnectionsOperations` - * 2022-10-01: :class:`PrivateEndpointConnectionsOperations` - * 2022-10-01-preview: :class:`PrivateEndpointConnectionsOperations` - """ - api_version = self._get_api_version('private_endpoint_connections') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def private_link_resources(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`PrivateLinkResourcesOperations` - * 2022-01-01-preview: :class:`PrivateLinkResourcesOperations` - * 2022-05-01: :class:`PrivateLinkResourcesOperations` - * 2022-10-01: :class:`PrivateLinkResourcesOperations` - * 2022-10-01-preview: :class:`PrivateLinkResourcesOperations` - """ - api_version = self._get_api_version('private_link_resources') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-05-01': - from 
..v2022_05_01.aio.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import PrivateLinkResourcesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def quotas(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`QuotasOperations` - * 2022-01-01-preview: :class:`QuotasOperations` - * 2022-05-01: :class:`QuotasOperations` - * 2022-10-01: :class:`QuotasOperations` - * 2022-10-01-preview: :class:`QuotasOperations` - """ - api_version = self._get_api_version('quotas') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import QuotasOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import QuotasOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import QuotasOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import QuotasOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import QuotasOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'quotas'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registries(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistriesOperations` - """ - api_version = 
self._get_api_version('registries') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistriesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registries'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_code_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryCodeContainersOperations` - """ - api_version = self._get_api_version('registry_code_containers') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistryCodeContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_code_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_code_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryCodeVersionsOperations` - """ - api_version = self._get_api_version('registry_code_versions') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistryCodeVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_code_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_component_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryComponentContainersOperations` - """ - api_version = self._get_api_version('registry_component_containers') - if api_version == '2022-10-01-preview': - from 
..v2022_10_01_preview.aio.operations import RegistryComponentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_component_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_component_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryComponentVersionsOperations` - """ - api_version = self._get_api_version('registry_component_versions') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistryComponentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_component_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_environment_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryEnvironmentContainersOperations` - """ - api_version = self._get_api_version('registry_environment_containers') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistryEnvironmentContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_environment_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_environment_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryEnvironmentVersionsOperations` - """ - api_version = self._get_api_version('registry_environment_versions') - if api_version == '2022-10-01-preview': - 
from ..v2022_10_01_preview.aio.operations import RegistryEnvironmentVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_environment_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_management_non_workspace(self): - """Instance depends on the API version: - - * v1.0: :class:`RegistryManagementNonWorkspaceOperations` - """ - api_version = self._get_api_version('registry_management_non_workspace') - if api_version == 'v1.0': - from ..registry_discovery.aio.operations import RegistryManagementNonWorkspaceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_management_non_workspace'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_model_containers(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryModelContainersOperations` - """ - api_version = self._get_api_version('registry_model_containers') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import RegistryModelContainersOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_model_containers'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def registry_model_versions(self): - """Instance depends on the API version: - - * 2022-10-01-preview: :class:`RegistryModelVersionsOperations` - """ - api_version = self._get_api_version('registry_model_versions') - if api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations 
import RegistryModelVersionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'registry_model_versions'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def resource_management_asset_reference(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`ResourceManagementAssetReferenceOperations` - """ - api_version = self._get_api_version('resource_management_asset_reference') - if api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import ResourceManagementAssetReferenceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'resource_management_asset_reference'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def run(self): - """Instance depends on the API version: - - * v1.0: :class:`RunOperations` - """ - api_version = self._get_api_version('run') - if api_version == 'v1.0': - from ..runhistory.aio.operations import RunOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'run'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def run_artifacts(self): - """Instance depends on the API version: - - * v1.0: :class:`RunArtifactsOperations` - """ - api_version = self._get_api_version('run_artifacts') - if api_version == 'v1.0': - from ..runhistory.aio.operations import RunArtifactsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'run_artifacts'".format(api_version)) - return OperationClass(self._client, 
self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def runs(self): - """Instance depends on the API version: - - * v1.0: :class:`RunsOperations` - """ - api_version = self._get_api_version('runs') - if api_version == 'v1.0': - from ..runhistory.aio.operations import RunsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'runs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def schedules(self): - """Instance depends on the API version: - - * 2022-06-01-preview: :class:`SchedulesOperations` - * 2022-10-01: :class:`SchedulesOperations` - * 2022-10-01-preview: :class:`SchedulesOperations` - """ - api_version = self._get_api_version('schedules') - if api_version == '2022-06-01-preview': - from ..v2022_06_01_preview.aio.operations import SchedulesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import SchedulesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import SchedulesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'schedules'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def spans(self): - """Instance depends on the API version: - - * v1.0: :class:`SpansOperations` - """ - api_version = self._get_api_version('spans') - if api_version == 'v1.0': - from ..runhistory.aio.operations import SpansOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'spans'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), 
Deserializer(self._models_dict(api_version))) - - @property - def temporary_data_references(self): - """Instance depends on the API version: - - * 2021-10-01-dataplanepreview: :class:`TemporaryDataReferencesOperations` - """ - api_version = self._get_api_version('temporary_data_references') - if api_version == '2021-10-01-dataplanepreview': - from ..v2021_10_01_dataplanepreview.aio.operations import TemporaryDataReferencesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'temporary_data_references'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def usages(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`UsagesOperations` - * 2022-01-01-preview: :class:`UsagesOperations` - * 2022-05-01: :class:`UsagesOperations` - * 2022-10-01: :class:`UsagesOperations` - * 2022-10-01-preview: :class:`UsagesOperations` - """ - api_version = self._get_api_version('usages') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import UsagesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import UsagesOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import UsagesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import UsagesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import UsagesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'usages'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def virtual_machine_sizes(self): - """Instance depends on 
the API version: - - * 2021-10-01: :class:`VirtualMachineSizesOperations` - * 2022-01-01-preview: :class:`VirtualMachineSizesOperations` - * 2022-05-01: :class:`VirtualMachineSizesOperations` - * 2022-10-01: :class:`VirtualMachineSizesOperations` - * 2022-10-01-preview: :class:`VirtualMachineSizesOperations` - """ - api_version = self._get_api_version('virtual_machine_sizes') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import VirtualMachineSizesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import VirtualMachineSizesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'virtual_machine_sizes'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspace_connections(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspaceConnectionsOperations` - * 2022-01-01-preview: :class:`WorkspaceConnectionsOperations` - * 2022-05-01: :class:`WorkspaceConnectionsOperations` - * 2022-10-01: :class:`WorkspaceConnectionsOperations` - * 2022-10-01-preview: :class:`WorkspaceConnectionsOperations` - """ - api_version = self._get_api_version('workspace_connections') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import 
WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import WorkspaceConnectionsOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import WorkspaceConnectionsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspace_connections'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspace_features(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspaceFeaturesOperations` - * 2022-01-01-preview: :class:`WorkspaceFeaturesOperations` - * 2022-05-01: :class:`WorkspaceFeaturesOperations` - * 2022-10-01: :class:`WorkspaceFeaturesOperations` - * 2022-10-01-preview: :class:`WorkspaceFeaturesOperations` - """ - api_version = self._get_api_version('workspace_features') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import WorkspaceFeaturesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import WorkspaceFeaturesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspace_features'".format(api_version)) - return OperationClass(self._client, self._config, 
Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def workspaces(self): - """Instance depends on the API version: - - * 2021-10-01: :class:`WorkspacesOperations` - * 2022-01-01-preview: :class:`WorkspacesOperations` - * 2022-05-01: :class:`WorkspacesOperations` - * 2022-10-01: :class:`WorkspacesOperations` - * 2022-10-01-preview: :class:`WorkspacesOperations` - """ - api_version = self._get_api_version('workspaces') - if api_version == '2021-10-01': - from ..v2021_10_01.aio.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-01-01-preview': - from ..v2022_01_01_preview.aio.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-05-01': - from ..v2022_05_01.aio.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-10-01': - from ..v2022_10_01.aio.operations import WorkspacesOperations as OperationClass - elif api_version == '2022-10-01-preview': - from ..v2022_10_01_preview.aio.operations import WorkspacesOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'workspaces'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - async def close(self): - await self._client.close() - async def __aenter__(self): - await self._client.__aenter__() - return self - async def __aexit__(self, *exc_details): - await self._client.__aexit__(*exc_details) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_configuration.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_configuration.py deleted file mode 100644 index bc1487b047df..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/aio/_configuration.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft 
Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy - -from .._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -class AzureMachineLearningWorkspacesConfiguration(Configuration): - """Configuration for AzureMachineLearningWorkspaces. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. 
- :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs # type: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-machinelearningservices/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/models.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/models.py deleted file mode 100644 index c810600d5dee..000000000000 --- 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/models.py +++ /dev/null @@ -1,14 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from .dataset_dataplane.models import * -from .model_dataplane.models import * -from .registry_discovery.models import * -from .runhistory.models import * -from .v2020_09_01_dataplanepreview.models import * -from .v2021_10_01.models import * -from .v2021_10_01_dataplanepreview.models import * -from .v2022_10_01.models import * diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/py.typed b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/ml/azure-ai-ml/scripts/regenerate_restclient.py b/sdk/ml/azure-ai-ml/scripts/regenerate_restclient.py index 32e58ec55e43..ff07da3179a0 100644 --- a/sdk/ml/azure-ai-ml/scripts/regenerate_restclient.py +++ b/sdk/ml/azure-ai-ml/scripts/regenerate_restclient.py @@ -122,8 +122,6 @@ def regenerate_restclient(api_tag, verbose): "autorest", "--python", "--track2", - "--version=3.6.2", - "--use=@autorest/python@5.12.6", f"--python-sdks-folder={restclient_path.absolute()}", "--package-version=0.1.0", tag_arg, diff --git a/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/readme.md b/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/readme.md index 3e70e32a51a1..596d42405453 100644 --- a/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/readme.md +++ b/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/readme.md @@ -32,7 +32,7 @@ openapi-type: arm These settings apply only when `--tag=model-dataplane` is specified on the command line. -```yaml +```yaml $(tag) == 'model-dataplane' input-file: - Microsoft.MachineLearningServices/stable/model-dataplane/swagger.json output-folder: $(python-sdks-folder)/model_dataplane @@ -42,7 +42,7 @@ output-folder: $(python-sdks-folder)/model_dataplane These settings apply only when `--tag=dataset-dataplane` is specified on the command line. -```yaml +```yaml $(tag) == 'dataset-dataplane' input-file: - Microsoft.MachineLearningServices/stable/dataset-dataplane/swagger.json output-folder: $(python-sdks-folder)/dataset_dataplane @@ -52,7 +52,7 @@ output-folder: $(python-sdks-folder)/dataset_dataplane These settings apply only when `--tag=v2022-05-01` is specified on the command line. 
-```yaml +```yaml $(tag) == 'v2022-05-01' input-file: - Microsoft.MachineLearningServices/stable/2022-05-01/mfe.json - Microsoft.MachineLearningServices/stable/2022-05-01/machineLearningServices.json @@ -64,7 +64,7 @@ output-folder: $(python-sdks-folder)/v2022_05_01 These settings apply only when `--tag=v2022-02-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2022-02-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2022-02-01-preview/mfe.json output-folder: $(python-sdks-folder)/v2022_02_01_preview @@ -74,7 +74,7 @@ output-folder: $(python-sdks-folder)/v2022_02_01_preview These settings apply only when `--tag=v2022-01-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2022-01-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2022-01-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2022-01-01-preview/workspaceFeatures.json @@ -83,9 +83,9 @@ output-folder: $(python-sdks-folder)/v2022_01_01_preview ### Tag: mfe-dataplane-preview -These settings apply only when `--v2020-09-01-dataplanepreview` is specified on the command line. +These settings apply only when `--tag=v2020-09-01-dataplanepreview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2020-09-01-dataplanepreview' input-file: - Microsoft.MachineLearningServices/preview/2020-09-01-dataplanepreview/mfe.json output-folder: $(python-sdks-folder)/v2020_09_01_dataplanepreview @@ -93,9 +93,9 @@ output-folder: $(python-sdks-folder)/v2020_09_01_dataplanepreview ### Tag: mfe-dataplane-preview-10-01 -These settings apply only when `--v2021-10-01-dataplanepreview` is specified on the command line. +These settings apply only when `--tag=v2021-10-01-dataplanepreview` is specified on the command line. 
-```yaml +```yaml $(tag) == 'v2021-10-01-dataplanepreview' input-file: - Microsoft.MachineLearningServices/preview/2021-10-01-dataplanepreview/mfe.json output-folder: $(python-sdks-folder)/v2021_10_01_dataplanepreview @@ -105,7 +105,7 @@ output-folder: $(python-sdks-folder)/v2021_10_01_dataplanepreview These settings apply only when `--tag=runhistory` is specified on the command line. -```yaml +```yaml $(tag) == 'runhistory' input-file: - Microsoft.MachineLearningServices/preview/runhistory/run-history.json output-folder: $(python-sdks-folder)/runhistory @@ -115,7 +115,7 @@ output-folder: $(python-sdks-folder)/runhistory These settings apply only when `--tag=workspace-dataplane` is specified on the command line. -```yaml +```yaml $(tag) == 'workspace-dataplane' input-file: - Microsoft.MachineLearningServices/preview/workspace-dataplane/swagger.json output-folder: $(python-sdks-folder)/workspace_dataplane @@ -125,7 +125,7 @@ output-folder: $(python-sdks-folder)/workspace_dataplane These settings apply only when `--tag=registry-discovery` is specified on the command line. -```yaml +```yaml $(tag) == 'registry-discovery' input-file: - Microsoft.MachineLearningServices/preview/registry-discovery/registry-discovery.json output-folder: $(python-sdks-folder)/registry_discovery @@ -135,7 +135,7 @@ output-folder: $(python-sdks-folder)/registry_discovery These settings apply only when `--tag=v2022-10-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2022-10-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2022-10-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2022-10-01-preview/registries.json @@ -146,9 +146,9 @@ output-folder: $(python-sdks-folder)/v2022_10_01_preview ### Tag: v2023-02-01-preview -These settings apply only when `--tag=v2023-0201-preview` is specified on the command line. +These settings apply only when `--tag=v2023-02-01-preview` is specified on the command line. 
-```yaml +```yaml $(tag) == 'v2023-02-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2023-02-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2023-02-01-preview/registries.json @@ -161,7 +161,7 @@ output-folder: $(python-sdks-folder)/v2023_02_01_preview These settings apply only when `--tag=v2022-12-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2022-12-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2022-12-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2022-12-01-preview/registries.json @@ -174,7 +174,7 @@ output-folder: $(python-sdks-folder)/v2022_12_01_preview These settings apply only when `--tag=v2023-04-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2023-04-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2023-04-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2023-04-01-preview/registries.json @@ -187,7 +187,7 @@ output-folder: $(python-sdks-folder)/v2023_04_01_preview These settings apply only when `--tag=v2023-04-01` is specified on the command line. -```yaml +```yaml $(tag) == 'v2023-04-01' input-file: - Microsoft.MachineLearningServices/stable/2023-04-01/machineLearningServices.json - Microsoft.MachineLearningServices/stable/2023-04-01/registries.json @@ -200,7 +200,7 @@ output-folder: $(python-sdks-folder)/v2023_04_01 These settings apply only when `--tag=v2023-06-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2023-06-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2023-06-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2023-06-01-preview/registries.json @@ -214,7 +214,7 @@ output-folder: $(python-sdks-folder)/v2023_06_01_preview These settings apply only when `--tag=v2023-08-01-preview` is specified on the command line. 
-```yaml +```yaml $(tag) == 'v2023-08-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2023-08-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2023-08-01-preview/registries.json @@ -228,7 +228,7 @@ output-folder: $(python-sdks-folder)/v2023_08_01_preview These settings apply only when `--tag=v2023-10-01` is specified on the command line. -```yaml +```yaml $(tag) == 'v2023-10-01' input-file: - Microsoft.MachineLearningServices/stable/2023-10-01/machineLearningServices.json - Microsoft.MachineLearningServices/stable/2023-10-01/registries.json @@ -241,7 +241,7 @@ output-folder: $(python-sdks-folder)/v2023_10_01 These settings apply only when `--tag=v2024-01-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2024-01-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2024-01-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2024-01-01-preview/registries.json @@ -255,7 +255,7 @@ output-folder: $(python-sdks-folder)/v2024_01_01_preview These settings apply only when `--tag=v2024-04-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2024-04-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2024-04-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2024-04-01-preview/registries.json @@ -269,7 +269,7 @@ output-folder: $(python-sdks-folder)/v2024_04_01_preview These settings apply only when `--tag=v2024-07-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2024-07-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2024-07-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2024-07-01-preview/registries.json @@ -283,7 +283,7 @@ output-folder: $(python-sdks-folder)/v2024_07_01_preview These settings apply only when `--tag=v2024-10-01-preview` is specified on the command line. 
-```yaml +```yaml $(tag) == 'v2024-10-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2024-10-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2024-10-01-preview/registries.json @@ -297,7 +297,7 @@ output-folder: $(python-sdks-folder)/v2024_10_01_preview These settings apply only when `--tag=v2025-01-01-preview` is specified on the command line. -```yaml +```yaml $(tag) == 'v2025-01-01-preview' input-file: - Microsoft.MachineLearningServices/preview/2025-01-01-preview/machineLearningServices.json - Microsoft.MachineLearningServices/preview/2025-01-01-preview/registries.json @@ -317,46 +317,6 @@ input-file: output-folder: $(python-sdks-folder)/v2024_04_01_dataplanepreview ``` -### Tag: multiapi - -These settings apply only when `--multiapi` is specified on the command line. - -```yaml -clear-output-folder: true -batch: - - tag: model-dataplane - - tag: dataset-dataplane - - tag: v2022-05-01 - - tag: v2022-02-01-preview - - tag: v2022-01-01-preview - - tag: runhistory - - tag: v2020-09-01-dataplanepreview - - tag: registry-discovery - - tag: v2022-10-01-preview - - tag: v2022-12-01-preview - - tag: v2023-02-01-preview - - tag: v2023-04-01-preview - - tag: v2023-04-01 - - tag: v2023-06-01-preview - - tag: v2023-08-01-preview - - tag: v2023-10-01 - - tag: v2024-01-01-preview - - tag: v2024-04-01-preview - - tag: v2024-07-01-preview - - tag: v2024-10-01-preview - #unstable tags - - tag: v2021-10-01-dataplanepreview - - multiapiscript: true -``` - -### Multi API Script - -```yaml -clear-output-folder: false -output-folder: $(python-sdks-folder) -perform-load: false -``` - --- ## Code Generation @@ -368,92 +328,5 @@ This is not used by Autorest itself. 
```yaml swagger-to-sdk: - - repo: azure-sdk-for-net - - repo: azure-sdk-for-go - repo: azure-sdk-for-python - - repo: azure-sdk-for-js - - repo: azure-sdk-for-node - - repo: azure-cli-extensions -``` - -### C# - -These settings apply only when `--csharp` is specified on the command line. -Please also specify `--csharp-sdks-folder=`. - -```yaml -csharp: - azure-arm: true - license-header: MICROSOFT_MIT_NO_VERSION - namespace: Microsoft.Azure.Management.MachineLearningServices - output-folder: $(csharp-sdks-folder)/src/Generated - clear-output-folder: true -``` - -### Java - -These settings apply only when `--java` is specified on the command line. -Please also specify `--azure-libraries-for-java-folder=`. - -```yaml -azure-arm: true -fluent: true -namespace: com.microsoft.azure.management.machinelearning.services -license-header: MICROSOFT_MIT_NO_CODEGEN -payload-flattening-threshold: 1 -output-folder: $(azure-libraries-for-java-folder)/azure-mgmt-machinelearning/services -``` - -#### Java multi-api - -```yaml -batch: - - tag: package-2020-06-01 -``` - -#### Tag: package-2020-06-01 and java - -These settings apply only when `--tag=package-2020-06-01 --java` is specified on the command line. -Please also specify `--azure-libraries-for-java=`. - -```yaml -java: - namespace: com.microsoft.azure.management.machinelearningservices.v2020_06_01 - output-folder: $(azure-libraries-for-java-folder)/sdk/machinelearningservices/mgmt-v2020_06_01 -regenerate-manager: true -generate-interface: true -``` - -### Multi-API/Profile support for AutoRest v3 generators - -AutoRest V3 generators require the use of `--tag=all-api-versions` to select api files. - -This block is updated by an automatic script. Edits may be lost! 
- -```yaml -# include the azure profile definitions from the standard location -require: $(this-folder)/../../../profiles/readme.md - -# all the input files across all versions -input-file: - - $(this-folder)/Microsoft.MachineLearningServices/stable/2020-06-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2020-04-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2020-03-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2020-01-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2019-11-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2019-06-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2019-05-01/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/stable/2018-11-19/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/preview/2020-05-01-preview/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/preview/2020-04-01-preview/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/preview/2020-02-18-preview/machineLearningServices.json - - $(this-folder)/Microsoft.MachineLearningServices/preview/2018-03-01-preview/machineLearningServices.json -``` - -If there are files that should not be in the `all-api-versions` set, -uncomment the `exclude-file` section below and add the file paths. 
- -```yaml -#exclude-file: -# - $(this-folder)/Microsoft.Example/stable/2010-01-01/somefile.json ``` diff --git a/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_schema.py b/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_schema.py index 4550434a959c..4e924a4e99ef 100644 --- a/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_schema.py +++ b/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_schema.py @@ -1428,6 +1428,11 @@ def test_automl_node_in_pipeline_load_dump( "azure.ai.ml.operations._operation_orchestrator.OperationOrchestrator.get_asset_arm_id", return_value="xxx" ) mocker.patch("azure.ai.ml.operations._job_operations._upload_and_generate_remote_uri", return_value="yyy") + # Prevent token refresh check which fails on Windows with Mock credentials + mocker.patch( + "azure.mgmt.core.policies._authentication.ARMChallengeAuthenticationPolicy._need_new_token", + return_value=False, + ) mock_machinelearning_client.jobs._resolve_arm_id_or_upload_dependencies(pipeline) automl_job = pipeline.jobs[job_key] From 3c7a13fc189609684009434a0d09b1cbea41ba4c Mon Sep 17 00:00:00 2001 From: Matthew Metcalf Date: Fri, 23 Jan 2026 10:33:17 -0800 Subject: [PATCH 05/18] App Config Fixing + Adding Samples (#44790) * Fixing/Adding samples * Apply suggestion from @Copilot Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply suggestion from @Copilot Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update README.md * removing fail as we create the keys * fixing test mypy issue * update tests * fixing sample * format fixes * Fixing sample + disable one live test --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../assets.json | 2 +- .../samples/async_snapshot_sample.py | 95 ++++++++++++++---- .../samples/snapshot_sample.py | 96 
++++++++++++++----- .../tests/aio/test_async_snapshots.py | 1 + .../tests/test_snapshots.py | 1 + .../azure-appconfiguration/README.md | 14 ++- .../azure-appconfiguration/assets.json | 2 +- .../samples/hello_world_entra_id_sample.py | 70 ++++++++++++++ .../hello_world_sample_entra_id_and_bleu.py | 11 +-- .../test_azure_appconfiguration_client.py | 5 +- 10 files changed, 247 insertions(+), 50 deletions(-) create mode 100644 sdk/appconfiguration/azure-appconfiguration/samples/hello_world_entra_id_sample.py diff --git a/sdk/appconfiguration/azure-appconfiguration-provider/assets.json b/sdk/appconfiguration/azure-appconfiguration-provider/assets.json index c09c492b8eda..b612cb2b1b0f 100644 --- a/sdk/appconfiguration/azure-appconfiguration-provider/assets.json +++ b/sdk/appconfiguration/azure-appconfiguration-provider/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/appconfiguration/azure-appconfiguration-provider", - "Tag": "python/appconfiguration/azure-appconfiguration-provider_32bd63579a" + "Tag": "python/appconfiguration/azure-appconfiguration-provider_3e69808293" } diff --git a/sdk/appconfiguration/azure-appconfiguration-provider/samples/async_snapshot_sample.py b/sdk/appconfiguration/azure-appconfiguration-provider/samples/async_snapshot_sample.py index 413010a5b789..281061b14b46 100644 --- a/sdk/appconfiguration/azure-appconfiguration-provider/samples/async_snapshot_sample.py +++ b/sdk/appconfiguration/azure-appconfiguration-provider/samples/async_snapshot_sample.py @@ -7,49 +7,110 @@ import asyncio from azure.appconfiguration.provider.aio import load from azure.appconfiguration.provider import SettingSelector -from sample_utilities import get_client_modifications +from azure.appconfiguration.aio import AzureAppConfigurationClient # type:ignore +from azure.appconfiguration import ( # type:ignore + ConfigurationSettingsFilter, + ConfigurationSetting, + FeatureFlagConfigurationSetting, +) 
+from azure.identity.aio import DefaultAzureCredential import os +import uuid async def main(): - kwargs = get_client_modifications() - connection_string = os.environ["APPCONFIGURATION_CONNECTION_STRING"] + endpoint = os.environ["APPCONFIGURATION_ENDPOINT_STRING"] + credential = DefaultAzureCredential() - # Loading configuration settings from a snapshot - # Note: The snapshot must already exist in your App Configuration store - snapshot_name = "my-snapshot-name" + # Step 1: Create a snapshot + # First, we'll create some configuration settings and then create a snapshot containing them + client = AzureAppConfigurationClient(endpoint, credential) + # Create sample configuration settings (these will be included in the snapshot) + sample_settings = [ + ConfigurationSetting(key="app/settings/message", value="Hello from snapshot!"), + ConfigurationSetting(key="app/settings/fontSize", value="14"), + ConfigurationSetting(key="app/settings/backgroundColor", value="#FFFFFF"), + ] + + # Create a feature flag (also included in the snapshot) + sample_feature_flag = FeatureFlagConfigurationSetting( + feature_id="Beta", + enabled=True, + description="Beta feature flag from snapshot sample", + ) + + # Override settings with "prod" label (used in mixed selects, not in snapshot) + override_settings = [ + ConfigurationSetting(key="override.message", value="Production override!", label="prod"), + ConfigurationSetting(key="override.fontSize", value="16", label="prod"), + ] + + print("Creating sample configuration settings...") + for setting in sample_settings: + await client.set_configuration_setting(setting) + print(f" Created: {setting.key} = {setting.value}") + + # Create the feature flag + await client.set_configuration_setting(sample_feature_flag) + print(f" Created feature flag: {sample_feature_flag.feature_id} = {sample_feature_flag.enabled}") + + for setting in override_settings: + await client.set_configuration_setting(setting) + print(f" Created: {setting.key} = 
{setting.value} (label: {setting.label})") + + # Generate a unique snapshot name + snapshot_name = f"sample-snapshot-{uuid.uuid4().hex[:8]}" + + # Create snapshot with filters for app settings and feature flags (retention_period=3600 seconds = 1 hour) + snapshot_filters = [ + ConfigurationSettingsFilter(key="app/*"), + ConfigurationSettingsFilter(key=".appconfig.featureflag/*"), + ] + + poller = await client.begin_create_snapshot(name=snapshot_name, filters=snapshot_filters, retention_period=3600) + created_snapshot = await poller.result() + print(f"Created snapshot: {created_snapshot.name} with status: {created_snapshot.status}") + + # Step 2: Loading configuration settings from the snapshot snapshot_selects = [SettingSelector(snapshot_name=snapshot_name)] - config = await load(connection_string=connection_string, selects=snapshot_selects, **kwargs) + config = await load(endpoint=endpoint, credential=credential, selects=snapshot_selects) print("Configuration settings from snapshot:") for key, value in config.items(): print(f"{key}: {value}") + await config.close() - # You can also combine snapshot-based selectors with regular selectors - # The snapshot settings and filtered settings will be merged, with later selectors taking precedence + # Step 3: Combine snapshot with regular selectors (later selectors take precedence) mixed_selects = [ SettingSelector(snapshot_name=snapshot_name), # Load all settings from snapshot SettingSelector(key_filter="override.*", label_filter="prod"), # Also load specific override settings ] - config_mixed = await load(connection_string=connection_string, selects=mixed_selects, **kwargs) + config_mixed = await load(endpoint=endpoint, credential=credential, selects=mixed_selects) print("\nMixed configuration (snapshot + filtered settings):") for key, value in config_mixed.items(): print(f"{key}: {value}") + await config_mixed.close() - # Loading feature flags from a snapshot - # To load feature flags from a snapshot, include the snapshot 
selector in the 'selects' parameter and set feature_flag_enabled=True. + # Step 4: Load feature flags from the snapshot (requires feature_flag_enabled=True) feature_flag_selects = [SettingSelector(snapshot_name=snapshot_name)] config_with_flags = await load( - connection_string=connection_string, + endpoint=endpoint, + credential=credential, selects=feature_flag_selects, feature_flag_enabled=True, - **kwargs, - ) - print( - f"\nConfiguration includes feature flags: {any(key.startswith('.appconfig.featureflag/') for key in config_with_flags.keys())}" ) + print(f"\nFeature flags loaded: {'feature_management' in config_with_flags}") + if "feature_management" in config_with_flags: + feature_flags = config_with_flags["feature_management"].get("feature_flags", []) + for flag in feature_flags: + print(f" {flag['id']}: enabled={flag['enabled']}") + + await client.close() + await config_with_flags.close() + await credential.close() + if __name__ == "__main__": asyncio.run(main()) diff --git a/sdk/appconfiguration/azure-appconfiguration-provider/samples/snapshot_sample.py b/sdk/appconfiguration/azure-appconfiguration-provider/samples/snapshot_sample.py index 5222879e5431..2f0b650ad74b 100644 --- a/sdk/appconfiguration/azure-appconfiguration-provider/samples/snapshot_sample.py +++ b/sdk/appconfiguration/azure-appconfiguration-provider/samples/snapshot_sample.py @@ -3,52 +3,104 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# ------------------------------------------------------------------------- - -from azure.appconfiguration.provider import load, SettingSelector -from sample_utilities import get_authority, get_credential, get_client_modifications import os +import uuid +from azure.identity import DefaultAzureCredential +from azure.appconfiguration.provider import load, SettingSelector +from azure.appconfiguration import ( # type:ignore + AzureAppConfigurationClient, + ConfigurationSettingsFilter, + ConfigurationSnapshot, + ConfigurationSetting, + FeatureFlagConfigurationSetting, +) + +endpoint = os.environ["APPCONFIGURATION_ENDPOINT_STRING"] +credential = DefaultAzureCredential() + +# Step 1: Create a snapshot +# First, we'll create some configuration settings and then create a snapshot containing them +client = AzureAppConfigurationClient(endpoint, credential) + + +# Create sample configuration settings (these will be included in the snapshot) +sample_settings = [ + ConfigurationSetting(key="app/settings/message", value="Hello from snapshot!"), + ConfigurationSetting(key="app/settings/fontSize", value="14"), + ConfigurationSetting(key="app/settings/backgroundColor", value="#FFFFFF"), +] + +# Create a feature flag (also included in the snapshot) +sample_feature_flag = FeatureFlagConfigurationSetting( + feature_id="Beta", + enabled=True, + description="Beta feature flag from snapshot sample", +) + +# Override settings with "prod" label (used in mixed selects, not in snapshot) +override_settings = [ + ConfigurationSetting(key="override.message", value="Production override!", label="prod"), + ConfigurationSetting(key="override.fontSize", value="16", label="prod"), +] + +print("Creating sample configuration settings...") +for setting in sample_settings: + client.set_configuration_setting(setting) + print(f" Created: {setting.key} = {setting.value}") + +# Create the feature flag +client.set_configuration_setting(sample_feature_flag) +print(f" Created feature flag: 
{sample_feature_flag.feature_id} = {sample_feature_flag.enabled}") -endpoint = os.environ.get("APPCONFIGURATION_ENDPOINT_STRING") -authority = get_authority(endpoint) -credential = get_credential(authority) -kwargs = get_client_modifications() +for setting in override_settings: + client.set_configuration_setting(setting) + print(f" Created: {setting.key} = {setting.value} (label: {setting.label})") -# Connecting to Azure App Configuration using AAD -config = load(endpoint=endpoint, credential=credential, **kwargs) +# Generate a unique snapshot name +snapshot_name = f"sample-snapshot-{uuid.uuid4().hex[:8]}" -# Loading configuration settings from a snapshot -# Note: The snapshot must already exist in your App Configuration store -snapshot_name = "my-snapshot-name" +# Create snapshot with filters for app settings and feature flags (retention_period=3600 seconds = 1 hour) +snapshot_filters = [ + ConfigurationSettingsFilter(key="app/*"), + ConfigurationSettingsFilter(key=".appconfig.featureflag/*"), +] + +created_snapshot = client.begin_create_snapshot( + name=snapshot_name, filters=snapshot_filters, retention_period=3600 +).result() +print(f"Created snapshot: {created_snapshot.name} with status: {created_snapshot.status}") + + +# Step 2: Loading configuration settings from the snapshot snapshot_selects = [SettingSelector(snapshot_name=snapshot_name)] -config = load(endpoint=endpoint, credential=credential, selects=snapshot_selects, **kwargs) +config = load(endpoint=endpoint, credential=credential, selects=snapshot_selects) print("Configuration settings from snapshot:") for key, value in config.items(): print(f"{key}: {value}") -# You can also combine snapshot-based selectors with regular selectors -# The snapshot settings and filtered settings will be merged, with later selectors taking precedence +# Step 3: Combine snapshot with regular selectors (later selectors take precedence) mixed_selects = [ SettingSelector(snapshot_name=snapshot_name), # Load all settings from 
snapshot SettingSelector(key_filter="override.*", label_filter="prod"), # Also load specific override settings ] -config_mixed = load(endpoint=endpoint, credential=credential, selects=mixed_selects, **kwargs) +config_mixed = load(endpoint=endpoint, credential=credential, selects=mixed_selects) print("\nMixed configuration (snapshot + filtered settings):") for key, value in config_mixed.items(): print(f"{key}: {value}") -# Loading feature flags from a snapshot -# To load feature flags from a snapshot, include the snapshot selector in the `selects` parameter and set `feature_flag_enabled=True`. +# Step 4: Load feature flags from the snapshot (requires feature_flag_enabled=True) feature_flag_selects = [SettingSelector(snapshot_name=snapshot_name)] config_with_flags = load( endpoint=endpoint, credential=credential, selects=feature_flag_selects, feature_flag_enabled=True, - **kwargs, ) -print( - f"\nConfiguration includes feature flags: {any(key.startswith('.appconfig.featureflag/') for key in config_with_flags.keys())}" -) +print(f"\nFeature flags loaded: {'feature_management' in config_with_flags}") +if "feature_management" in config_with_flags: + feature_flags = config_with_flags["feature_management"].get("feature_flags", []) + for flag in feature_flags: + print(f" {flag['id']}: enabled={flag['enabled']}") diff --git a/sdk/appconfiguration/azure-appconfiguration-provider/tests/aio/test_async_snapshots.py b/sdk/appconfiguration/azure-appconfiguration-provider/tests/aio/test_async_snapshots.py index 50ae76f0bc1c..926542d82872 100644 --- a/sdk/appconfiguration/azure-appconfiguration-provider/tests/aio/test_async_snapshots.py +++ b/sdk/appconfiguration/azure-appconfiguration-provider/tests/aio/test_async_snapshots.py @@ -168,6 +168,7 @@ async def test_snapshot_selector_parameter_validation_in_provider(self, appconfi feature_flag_selectors=[SettingSelector(snapshot_name="test-snapshot")], ) + @pytest.mark.live_test_only # Needed to fix an azure core dependency 
compatibility issue @app_config_decorator_async @recorded_by_proxy_async async def test_create_snapshot_and_load_provider(self, appconfiguration_connection_string, **kwargs): diff --git a/sdk/appconfiguration/azure-appconfiguration-provider/tests/test_snapshots.py b/sdk/appconfiguration/azure-appconfiguration-provider/tests/test_snapshots.py index 11865016978d..3a31c3ac874c 100644 --- a/sdk/appconfiguration/azure-appconfiguration-provider/tests/test_snapshots.py +++ b/sdk/appconfiguration/azure-appconfiguration-provider/tests/test_snapshots.py @@ -166,6 +166,7 @@ def test_snapshot_selector_parameter_validation_in_provider(self, appconfigurati feature_flag_selectors=[SettingSelector(snapshot_name="test-snapshot")], ) + @pytest.mark.live_test_only # Needed to fix an azure core dependency compatibility issue @app_config_decorator @recorded_by_proxy def test_create_snapshot_and_load_provider(self, appconfiguration_connection_string, **kwargs): diff --git a/sdk/appconfiguration/azure-appconfiguration/README.md b/sdk/appconfiguration/azure-appconfiguration/README.md index 4b59281924b0..1ba482518ad6 100644 --- a/sdk/appconfiguration/azure-appconfiguration/README.md +++ b/sdk/appconfiguration/azure-appconfiguration/README.md @@ -71,7 +71,7 @@ client = AzureAppConfigurationClient.from_connection_string(CONNECTION_STRING) -#### Use AAD token +#### Use Entra ID token Here we demonstrate using [DefaultAzureCredential][default_cred_ref] to authenticate as a service principal. However, [AzureAppConfigurationClient][configuration_client_class] @@ -79,6 +79,18 @@ accepts any [azure-identity][azure_identity] credential. See the [azure-identity][azure_identity] documentation for more information about other credentials. 
+ + +```python + + ENDPOINT = os.environ["APPCONFIGURATION_ENDPOINT"] + credential = DefaultAzureCredential() + # Create app config client + client = AzureAppConfigurationClient(base_url=ENDPOINT, credential=credential) +``` + + + ##### Create a service principal (optional) This [Azure CLI][azure_cli] snippet shows how to create a new service principal. Before using it, replace "your-application-name" with diff --git a/sdk/appconfiguration/azure-appconfiguration/assets.json b/sdk/appconfiguration/azure-appconfiguration/assets.json index 3d5f769f30fe..dc13685404d8 100644 --- a/sdk/appconfiguration/azure-appconfiguration/assets.json +++ b/sdk/appconfiguration/azure-appconfiguration/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/appconfiguration/azure-appconfiguration", - "Tag": "python/appconfiguration/azure-appconfiguration_7b8ff3a790" + "Tag": "python/appconfiguration/azure-appconfiguration_e031d16e39" } diff --git a/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_entra_id_sample.py b/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_entra_id_sample.py new file mode 100644 index 000000000000..e539d26ddea1 --- /dev/null +++ b/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_entra_id_sample.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: hello_world_entra_id_sample.py + +DESCRIPTION: + This sample demos how to add/update/retrieve/delete configuration settings synchronously. 
+ +USAGE: python hello_world_entra_id_sample.py + + Set the environment variables with your own values before running the sample: + 1) APPCONFIGURATION_ENDPOINT: The endpoint of the Azure App Configuration store. +""" +import os +from azure.appconfiguration import AzureAppConfigurationClient +from azure.identity import DefaultAzureCredential +from azure.appconfiguration import ConfigurationSetting + + +def main(): + # [START create_app_config_client] + + ENDPOINT = os.environ["APPCONFIGURATION_ENDPOINT"] + credential = DefaultAzureCredential() + # Create app config client + client = AzureAppConfigurationClient(base_url=ENDPOINT, credential=credential) + # [END create_app_config_client] + + print("Add new configuration setting") + # [START create_config_setting] + config_setting = ConfigurationSetting( + key="MyKey", label="MyLabel", value="my value", content_type="my content type", tags={"my tag": "my tag value"} + ) + added_config_setting = client.add_configuration_setting(config_setting) + # [END create_config_setting] + print("New configuration setting:") + print(added_config_setting) + print("") + + print("Set configuration setting") + # [START set_config_setting] + added_config_setting.value = "new value" + added_config_setting.content_type = "new content type" + updated_config_setting = client.set_configuration_setting(added_config_setting) + # [END set_config_setting] + print(updated_config_setting) + print("") + + print("Get configuration setting") + # [START get_config_setting] + fetched_config_setting = client.get_configuration_setting(key="MyKey", label="MyLabel") + # [END get_config_setting] + print("Fetched configuration setting:") + print(fetched_config_setting) + print("") + + print("Delete configuration setting") + # [START delete_config_setting] + client.delete_configuration_setting(key="MyKey", label="MyLabel") + # [END delete_config_setting] + + +if __name__ == "__main__": + main() diff --git 
a/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_sample_entra_id_and_bleu.py b/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_sample_entra_id_and_bleu.py index 420b650193e2..ad91cf88cd2b 100644 --- a/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_sample_entra_id_and_bleu.py +++ b/sdk/appconfiguration/azure-appconfiguration/samples/hello_world_sample_entra_id_and_bleu.py @@ -23,7 +23,7 @@ 4) AZURE_CLIENT_SECRET: Your application client secret For Azure Bleu (French Sovereign Cloud): - - Use credential_scopes: ["https://appconfig.sovcloud-api.fr/.default"] + - Use audience: "https://appconfig.sovcloud-api.fr/" DefaultAzureCredential will attempt multiple authentication methods: - Environment variables (AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET) @@ -33,21 +33,20 @@ - Azure PowerShell - Interactive browser """ +import os +from azure.appconfiguration import AzureAppConfigurationClient +from azure.identity import DefaultAzureCredential from azure.appconfiguration import ConfigurationSetting def main(): # [START create_app_config_client_entra_id] - import os - from azure.appconfiguration import AzureAppConfigurationClient - from azure.identity import DefaultAzureCredential - ENDPOINT = os.environ["APPCONFIGURATION_ENDPOINT"] # Create app config client with Entra ID authentication credential = DefaultAzureCredential() client = AzureAppConfigurationClient( - base_url=ENDPOINT, credential=credential, credential_scopes=["https://appconfig.sovcloud-api.fr/.default"] + base_url=ENDPOINT, credential=credential, audience="https://appconfig.sovcloud-api.fr/" ) # [END create_app_config_client_entra_id] diff --git a/sdk/appconfiguration/azure-appconfiguration/tests/test_azure_appconfiguration_client.py b/sdk/appconfiguration/azure-appconfiguration/tests/test_azure_appconfiguration_client.py index cb73966af567..d52458ab630f 100644 --- 
a/sdk/appconfiguration/azure-appconfiguration/tests/test_azure_appconfiguration_client.py +++ b/sdk/appconfiguration/azure-appconfiguration/tests/test_azure_appconfiguration_client.py @@ -1067,7 +1067,8 @@ def test_list_snapshots(self, appconfiguration_connection_string, **kwargs): set_custom_default_matcher(compare_bodies=False, excluded_headers="x-ms-content-sha256,x-ms-date") self.set_up(appconfiguration_connection_string) - result = self.client.list_snapshots() + # Only list "ready" snapshots to avoid counting archived snapshots that may expire during test runs + result = self.client.list_snapshots(status=["ready"]) initial_snapshots = len(list(result)) variables = kwargs.pop("variables", {}) @@ -1085,7 +1086,7 @@ def test_list_snapshots(self, appconfiguration_connection_string, **kwargs): created_snapshot2 = response2.result() assert created_snapshot2.status == "ready" - result = self.client.list_snapshots() + result = self.client.list_snapshots(status=["ready"]) assert len(list(result)) == initial_snapshots + 2 self.tear_down() From 0510d7449cb3cedd087a478cf6fcae9fb56ac7dc Mon Sep 17 00:00:00 2001 From: Zixin Yao Date: Fri, 23 Jan 2026 10:43:42 -0800 Subject: [PATCH 06/18] update (#44795) Co-authored-by: Zixin Yao --- sdk/search/azure-search-documents/README.md | 97 +- .../azure/search/documents/_search_client.py | 22 +- .../documents/aio/_search_client_async.py | 22 +- .../documents/indexes/_search_index_client.py | 24 +- .../indexes/_search_indexer_client.py | 36 +- .../indexes/aio/_search_index_client.py | 24 +- .../indexes/aio/_search_indexer_client.py | 30 +- .../azure-search-documents/samples/README.md | 86 +- .../sample_agentic_retrieval_async.py | 221 + .../sample_analyze_text_async.py | 47 - .../sample_authentication_async.py | 77 +- .../sample_autocomplete_async.py | 49 - .../sample_buffered_sender_async.py | 56 - .../sample_crud_operations_async.py | 73 - .../sample_data_source_operations_async.py | 81 - 
.../sample_documents_buffered_sender_async.py | 76 + .../sample_documents_crud_async.py | 115 + .../async_samples/sample_facet_query_async.py | 52 - .../sample_filter_query_async.py | 54 - .../sample_get_document_async.py | 48 - .../sample_index_alias_crud_async.py | 142 + ...ample_index_alias_crud_operations_async.py | 111 - .../sample_index_analyze_text_async.py | 54 + ...ample_index_client_custom_request_async.py | 58 + .../sample_index_client_send_request_async.py | 49 - .../async_samples/sample_index_crud_async.py | 156 + .../sample_index_crud_operations_async.py | 127 - .../sample_index_synonym_map_crud_async.py | 112 + .../sample_indexer_crud_async.py | 194 + .../sample_indexer_datasource_crud_async.py | 105 + .../sample_indexers_operations_async.py | 132 - .../sample_query_autocomplete_async.py | 50 + .../sample_query_facets_async.py | 53 + .../sample_query_filter_async.py | 53 + .../sample_query_semantic_async.py | 120 + .../sample_query_session_async.py | 26 +- .../sample_query_simple_async.py | 46 + ...c.py => sample_query_suggestions_async.py} | 29 +- .../sample_query_vector_async.py | 272 + ...mple_search_client_custom_request_async.py | 54 + ...sample_search_client_send_request_async.py | 49 - .../sample_semantic_search_async.py | 71 - .../sample_simple_query_async.py | 49 - .../sample_synonym_map_operations_async.py | 80 - .../sample_vector_search_async.py | 203 - .../data/hotels_with_description_vector.json | 10855 ++++++++++++++++ .../samples/data/query_vector.json | 1538 +++ .../samples/{ => data}/synonym_map.txt | 0 .../samples/files/hotel_small.json | 252 - .../samples/sample_agentic_retrieval.py | 202 + .../samples/sample_analyze_text.py | 45 - .../samples/sample_authentication.py | 72 +- .../samples/sample_buffered_sender.py | 51 - .../samples/sample_crud_operations.py | 67 - .../samples/sample_data_source_operations.py | 72 - .../sample_documents_buffered_sender.py | 73 + .../samples/sample_documents_crud.py | 110 + 
.../samples/sample_facet_query.py | 49 - .../samples/sample_filter_query.py | 51 - .../samples/sample_get_document.py | 45 - .../samples/sample_index_alias_crud.py | 137 + .../sample_index_alias_crud_operations.py | 105 - .../samples/sample_index_analyze_text.py | 52 + .../sample_index_client_custom_request.py | 51 + .../sample_index_client_send_request.py | 47 - .../samples/sample_index_crud.py | 149 + .../samples/sample_index_crud_operations.py | 116 - .../samples/sample_index_synonym_map_crud.py | 106 + .../samples/sample_indexer_crud.py | 179 + .../samples/sample_indexer_datasource_crud.py | 96 + .../sample_indexer_datasource_skillset.py | 152 - .../samples/sample_indexer_workflow.py | 168 + .../samples/sample_indexers_operations.py | 123 - ...mplete.py => sample_query_autocomplete.py} | 25 +- .../samples/sample_query_facets.py | 50 + .../samples/sample_query_filter.py | 50 + .../samples/sample_query_semantic.py | 120 + .../samples/sample_query_session.py | 22 +- .../samples/sample_query_simple.py | 43 + .../samples/sample_query_suggestions.py | 44 + .../samples/sample_query_vector.py | 259 + .../sample_search_client_custom_request.py | 48 + .../sample_search_client_send_request.py | 47 - .../samples/sample_semantic_search.py | 70 - .../samples/sample_simple_query.py | 46 - .../samples/sample_suggestions.py | 47 - .../samples/sample_synonym_map_operations.py | 89 - .../samples/sample_utils.py | 8 + .../samples/sample_vector_search.py | 204 - 89 files changed, 16536 insertions(+), 3284 deletions(-) create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_agentic_retrieval_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_analyze_text_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_autocomplete_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_buffered_sender_async.py delete mode 100644 
sdk/search/azure-search-documents/samples/async_samples/sample_crud_operations_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_data_source_operations_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_documents_buffered_sender_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_documents_crud_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_facet_query_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_filter_query_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_get_document_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_operations_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_analyze_text_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_client_custom_request_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_client_send_request_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_index_synonym_map_crud_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_indexer_crud_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_indexer_datasource_crud_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_indexers_operations_async.py create mode 100644 
sdk/search/azure-search-documents/samples/async_samples/sample_query_autocomplete_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_query_facets_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_query_filter_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_query_semantic_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_query_simple_async.py rename sdk/search/azure-search-documents/samples/async_samples/{sample_suggestions_async.py => sample_query_suggestions_async.py} (51%) create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_query_vector_async.py create mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_search_client_custom_request_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_search_client_send_request_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_semantic_search_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_simple_query_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py delete mode 100644 sdk/search/azure-search-documents/samples/async_samples/sample_vector_search_async.py create mode 100644 sdk/search/azure-search-documents/samples/data/hotels_with_description_vector.json create mode 100644 sdk/search/azure-search-documents/samples/data/query_vector.json rename sdk/search/azure-search-documents/samples/{ => data}/synonym_map.txt (100%) delete mode 100644 sdk/search/azure-search-documents/samples/files/hotel_small.json create mode 100644 sdk/search/azure-search-documents/samples/sample_agentic_retrieval.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_analyze_text.py delete mode 100644 
sdk/search/azure-search-documents/samples/sample_buffered_sender.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_crud_operations.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_data_source_operations.py create mode 100644 sdk/search/azure-search-documents/samples/sample_documents_buffered_sender.py create mode 100644 sdk/search/azure-search-documents/samples/sample_documents_crud.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_facet_query.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_filter_query.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_get_document.py create mode 100644 sdk/search/azure-search-documents/samples/sample_index_alias_crud.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_index_alias_crud_operations.py create mode 100644 sdk/search/azure-search-documents/samples/sample_index_analyze_text.py create mode 100644 sdk/search/azure-search-documents/samples/sample_index_client_custom_request.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_index_client_send_request.py create mode 100644 sdk/search/azure-search-documents/samples/sample_index_crud.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_index_crud_operations.py create mode 100644 sdk/search/azure-search-documents/samples/sample_index_synonym_map_crud.py create mode 100644 sdk/search/azure-search-documents/samples/sample_indexer_crud.py create mode 100644 sdk/search/azure-search-documents/samples/sample_indexer_datasource_crud.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py create mode 100644 sdk/search/azure-search-documents/samples/sample_indexer_workflow.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_indexers_operations.py rename sdk/search/azure-search-documents/samples/{sample_autocomplete.py => sample_query_autocomplete.py} (52%) 
create mode 100644 sdk/search/azure-search-documents/samples/sample_query_facets.py create mode 100644 sdk/search/azure-search-documents/samples/sample_query_filter.py create mode 100644 sdk/search/azure-search-documents/samples/sample_query_semantic.py create mode 100644 sdk/search/azure-search-documents/samples/sample_query_simple.py create mode 100644 sdk/search/azure-search-documents/samples/sample_query_suggestions.py create mode 100644 sdk/search/azure-search-documents/samples/sample_query_vector.py create mode 100644 sdk/search/azure-search-documents/samples/sample_search_client_custom_request.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_search_client_send_request.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_semantic_search.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_simple_query.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_suggestions.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py create mode 100644 sdk/search/azure-search-documents/samples/sample_utils.py delete mode 100644 sdk/search/azure-search-documents/samples/sample_vector_search.py diff --git a/sdk/search/azure-search-documents/README.md b/sdk/search/azure-search-documents/README.md index 969bb4c92600..5750b4c0a2d5 100644 --- a/sdk/search/azure-search-documents/README.md +++ b/sdk/search/azure-search-documents/README.md @@ -90,7 +90,7 @@ to get started exploring APIs, but it should be managed carefully.* To instantiate the `SearchClient`, you'll need the **endpoint**, **API key** and **index name**: - + ```python from azure.core.credentials import AzureKeyCredential @@ -170,7 +170,7 @@ To learn more about semantic ranking, you can refer to the [documentation](https **Vector search** is an information retrieval technique that uses numeric representations of searchable documents and query strings. 
By searching for numeric representations of content that are most similar to the numeric query, vector search can find relevant matches, even if the exact terms of the query are not present in the index. Moreover, vector search can be applied to various types of content, including images and videos and translated text, not just same-language text. -To learn how to index vector fields and perform vector search, you can refer to the [sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_vector_search.py). This sample provides detailed guidance on indexing vector fields and demonstrates how to perform vector search. +To learn how to index vector fields and perform vector search, you can refer to the [sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_vector.py). This sample provides detailed guidance on indexing vector fields and demonstrates how to perform vector search. Additionally, for more comprehensive information about vector search, including its concepts and usage, you can refer to the [documentation](https://learn.microsoft.com/azure/search/vector-search-overview). The documentation provides in-depth explanations and guidance on leveraging the power of vector search in Azure AI Search. @@ -231,30 +231,49 @@ You can use the `SearchIndexClient` to create a search index. Fields can be defined using convenient `SimpleField`, `SearchableField`, or `ComplexField` models. Indexes can also define suggesters, lexical analyzers, and more. 
- + ```python -client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) -name = "hotels" +from azure.core.credentials import AzureKeyCredential +from azure.search.documents.indexes import SearchIndexClient +from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + SearchIndex, + ScoringProfile, + SearchFieldDataType, + SimpleField, + SearchableField, +) + +index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="hotelName", type=SearchFieldDataType.String, searchable=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="HotelName", type=SearchFieldDataType.String, searchable=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), ComplexField( - name="address", + name="Address", fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), ], collection=True, ), ] cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) scoring_profiles: List[ScoringProfile] = [] -index = SearchIndex(name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - -result = client.create_index(index) +index = SearchIndex( + name=index_name, + fields=fields, + scoring_profiles=scoring_profiles, + cors_options=cors_options, +) + +result = index_client.create_index(index) +print(f"Created: index '{result.name}'") ``` @@ -266,17 +285,38 @@ an index in a single batched 
request. There are [a few special rules for merging](https://learn.microsoft.com/rest/api/searchservice/addupdate-or-delete-documents#document-actions) to be aware of. - + ```python -DOCUMENT = { - "hotelId": "1000", - "hotelName": "Azure Inn", +from azure.core.credentials import AzureKeyCredential +from azure.search.documents import SearchClient + +search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + +document = { + "HotelId": "100", + "HotelName": "Azure Sanctuary", + "Description": "A quiet retreat offering understated elegance and premium amenities.", + "Description_fr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe.", + "Category": "Luxury", + "Tags": [ + "pool", + "view", + "wifi", + "concierge", + "private beach", + "gourmet dining", + "spa", + ], + "ParkingIncluded": False, + "LastRenovationDate": "2024-01-15T00:00:00+00:00", + "Rating": 5, + "Location": {"type": "Point", "coordinates": [-122.131577, 47.678581]}, } -result = search_client.upload_documents(documents=[DOCUMENT]) +result = search_client.upload_documents(documents=[document]) -print("Upload of new document succeeded: {}".format(result[0].succeeded)) +print(f"Uploaded: document 100 (succeeded={result[0].succeeded})") ``` @@ -309,7 +349,7 @@ you can retrieve a specific document from your index if you already know the key. You could get the key from a query, for example, and want to show more information about it or navigate your customer to that document. 
- + ```python from azure.core.credentials import AzureKeyCredential @@ -317,10 +357,11 @@ from azure.search.documents import SearchClient search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) -result = search_client.get_document(key="23") +result = search_client.get_document(key="100") -print("Details for hotel '23' are:") -print(" Name: {}".format(result["hotelName"])) +print("Result:") +print(f" HotelId: 100") +print(f" HotelName: {result['HotelName']}") ``` @@ -333,7 +374,7 @@ See [azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md#transport) for more information. - + ```python from azure.core.credentials import AzureKeyCredential @@ -344,9 +385,9 @@ search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(ke async with search_client: results = await search_client.search(search_text="spa") - print("Hotels containing 'spa' in the name (or other fields):") + print("Results: hotels with 'spa'") async for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") ``` diff --git a/sdk/search/azure-search-documents/azure/search/documents/_search_client.py b/sdk/search/azure-search-documents/azure/search/documents/_search_client.py index 7b46cf50f5e8..d7cc1b0088aa 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/_search_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/_search_client.py @@ -57,8 +57,8 @@ class SearchClient(HeadersMixin): .. admonition:: Example: .. 
literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_search_client_with_key] - :end-before: [END create_search_client_with_key] + :start-after: [START authenticate_search_client_with_api_key] + :end-before: [END authenticate_search_client_with_api_key] :language: python :dedent: 4 :caption: Creating the SearchClient with an API key. @@ -130,7 +130,7 @@ def get_document(self, key: str, selected_fields: Optional[List[str]] = None, ** .. admonition:: Example: - .. literalinclude:: ../samples/sample_get_document.py + .. literalinclude:: ../samples/sample_documents_crud.py :start-after: [START get_document] :end-before: [END get_document] :language: python @@ -323,7 +323,7 @@ def search( .. admonition:: Example: - .. literalinclude:: ../samples/sample_simple_query.py + .. literalinclude:: ../samples/sample_query_simple.py :start-after: [START simple_query] :end-before: [END simple_query] :language: python @@ -332,7 +332,7 @@ def search( .. admonition:: Example: - .. literalinclude:: ../samples/sample_filter_query.py + .. literalinclude:: ../samples/sample_query_filter.py :start-after: [START filter_query] :end-before: [END filter_query] :language: python @@ -341,7 +341,7 @@ def search( .. admonition:: Example: - .. literalinclude:: ../samples/sample_facet_query.py + .. literalinclude:: ../samples/sample_query_facets.py :start-after: [START facet_query] :end-before: [END facet_query] :language: python @@ -466,7 +466,7 @@ def suggest( .. admonition:: Example: - .. literalinclude:: ../samples/sample_suggestions.py + .. literalinclude:: ../samples/sample_query_suggestions.py :start-after: [START suggest_query] :end-before: [END suggest_query] :language: python @@ -548,7 +548,7 @@ def autocomplete( .. admonition:: Example: - .. literalinclude:: ../samples/sample_autocomplete.py + .. 
literalinclude:: ../samples/sample_query_autocomplete.py :start-after: [START autocomplete_query] :end-before: [END autocomplete_query] :language: python @@ -593,7 +593,7 @@ def upload_documents(self, documents: List[Dict], **kwargs: Any) -> List[Indexin .. admonition:: Example: - .. literalinclude:: ../samples/sample_crud_operations.py + .. literalinclude:: ../samples/sample_documents_crud.py :start-after: [START upload_document] :end-before: [END upload_document] :language: python @@ -627,7 +627,7 @@ def delete_documents(self, documents: List[Dict], **kwargs: Any) -> List[Indexin .. admonition:: Example: - .. literalinclude:: ../samples/sample_crud_operations.py + .. literalinclude:: ../samples/sample_documents_crud.py :start-after: [START delete_document] :end-before: [END delete_document] :language: python @@ -657,7 +657,7 @@ def merge_documents(self, documents: List[Dict], **kwargs: Any) -> List[Indexing .. admonition:: Example: - .. literalinclude:: ../samples/sample_crud_operations.py + .. literalinclude:: ../samples/sample_documents_crud.py :start-after: [START merge_document] :end-before: [END merge_document] :language: python diff --git a/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py b/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py index a8d4125fb9e2..e39a7f24b04f 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py +++ b/sdk/search/azure-search-documents/azure/search/documents/aio/_search_client_async.py @@ -58,8 +58,8 @@ class SearchClient(HeadersMixin): .. admonition:: Example: .. 
literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_search_client_with_key_async] - :end-before: [END create_search_client_with_key_async] + :start-after: [START authenticate_search_client_with_api_key_async] + :end-before: [END authenticate_search_client_with_api_key_async] :language: python :dedent: 4 :caption: Creating the SearchClient with an API key. @@ -132,7 +132,7 @@ async def get_document(self, key: str, selected_fields: Optional[List[str]] = No .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_get_document_async.py + .. literalinclude:: ../samples/async_samples/sample_documents_crud_async.py :start-after: [START get_document_async] :end-before: [END get_document_async] :language: python @@ -327,7 +327,7 @@ async def search( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_simple_query_async.py + .. literalinclude:: ../samples/async_samples/sample_query_simple_async.py :start-after: [START simple_query_async] :end-before: [END simple_query_async] :language: python @@ -336,7 +336,7 @@ async def search( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_filter_query_async.py + .. literalinclude:: ../samples/async_samples/sample_query_filter_async.py :start-after: [START filter_query_async] :end-before: [END filter_query_async] :language: python @@ -345,7 +345,7 @@ async def search( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_facet_query_async.py + .. literalinclude:: ../samples/async_samples/sample_query_facets_async.py :start-after: [START facet_query_async] :end-before: [END facet_query_async] :language: python @@ -463,7 +463,7 @@ async def suggest( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_suggestions_async.py + .. 
literalinclude:: ../samples/async_samples/sample_query_suggestions_async.py :start-after: [START suggest_query_async] :end-before: [END suggest_query_async] :language: python @@ -545,7 +545,7 @@ async def autocomplete( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_autocomplete_async.py + .. literalinclude:: ../samples/async_samples/sample_query_autocomplete_async.py :start-after: [START autocomplete_query_async] :end-before: [END autocomplete_query_async] :language: python @@ -590,7 +590,7 @@ async def upload_documents(self, documents: List[Dict], **kwargs: Any) -> List[I .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_documents_crud_async.py :start-after: [START upload_document_async] :end-before: [END upload_document_async] :language: python @@ -624,7 +624,7 @@ async def delete_documents(self, documents: List[Dict], **kwargs: Any) -> List[I .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_documents_crud_async.py :start-after: [START delete_document_async] :end-before: [END delete_document_async] :language: python @@ -654,7 +654,7 @@ async def merge_documents(self, documents: List[Dict], **kwargs: Any) -> List[In .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_crud_operations_async.py + .. 
literalinclude:: ../samples/async_samples/sample_documents_crud_async.py :start-after: [START merge_document_async] :end-before: [END merge_document_async] :language: python diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_index_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_index_client.py index 416614659f05..e8f8dec5cbb0 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_index_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_index_client.py @@ -150,7 +150,7 @@ def get_index(self, name: str, **kwargs: Any) -> SearchIndex: .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_crud_operations.py + .. literalinclude:: ../samples/sample_index_crud.py :start-after: [START get_index] :end-before: [END get_index] :language: python @@ -196,7 +196,7 @@ def delete_index( .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_crud_operations.py + .. literalinclude:: ../samples/sample_index_crud.py :start-after: [START delete_index] :end-before: [END delete_index] :language: python @@ -224,7 +224,7 @@ def create_index(self, index: SearchIndex, **kwargs: Any) -> SearchIndex: .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_crud_operations.py + .. literalinclude:: ../samples/sample_index_crud.py :start-after: [START create_index] :end-before: [END create_index] :language: python @@ -267,7 +267,7 @@ def create_or_update_index( .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_crud_operations.py + .. literalinclude:: ../samples/sample_index_crud.py :start-after: [START update_index] :end-before: [END update_index] :language: python @@ -302,7 +302,7 @@ def analyze_text(self, index_name: str, analyze_request: AnalyzeTextOptions, **k .. admonition:: Example: - .. literalinclude:: ../samples/sample_analyze_text.py + .. 
literalinclude:: ../samples/sample_index_analyze_text.py :start-after: [START simple_analyze_text] :end-before: [END simple_analyze_text] :language: python @@ -331,7 +331,7 @@ def get_synonym_maps(self, *, select: Optional[List[str]] = None, **kwargs) -> L .. admonition:: Example: - .. literalinclude:: ../samples/sample_synonym_map_operations.py + .. literalinclude:: ../samples/sample_index_synonym_map_crud.py :start-after: [START get_synonym_maps] :end-before: [END get_synonym_maps] :language: python @@ -373,7 +373,7 @@ def get_synonym_map(self, name: str, **kwargs: Any) -> SynonymMap: .. admonition:: Example: - .. literalinclude:: ../samples/sample_synonym_map_operations.py + .. literalinclude:: ../samples/sample_index_synonym_map_crud.py :start-after: [START get_synonym_map] :end-before: [END get_synonym_map] :language: python @@ -404,7 +404,7 @@ def delete_synonym_map( .. admonition:: Example: - .. literalinclude:: ../samples/sample_synonym_map_operations.py + .. literalinclude:: ../samples/sample_index_synonym_map_crud.py :start-after: [START delete_synonym_map] :end-before: [END delete_synonym_map] :language: python @@ -432,7 +432,7 @@ def create_synonym_map(self, synonym_map: SynonymMap, **kwargs: Any) -> SynonymM .. admonition:: Example: - .. literalinclude:: ../samples/sample_synonym_map_operations.py + .. literalinclude:: ../samples/sample_index_synonym_map_crud.py :start-after: [START create_synonym_map] :end-before: [END create_synonym_map] :language: python @@ -565,7 +565,7 @@ def delete_alias( .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_alias_crud_operations.py + .. literalinclude:: ../samples/sample_index_alias_crud.py :start-after: [START delete_alias] :end-before: [END delete_alias] :language: python @@ -593,7 +593,7 @@ def create_alias(self, alias: SearchAlias, **kwargs: Any) -> SearchAlias: .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_alias_crud_operations.py + .. 
literalinclude:: ../samples/sample_index_alias_crud.py :start-after: [START create_alias] :end-before: [END create_alias] :language: python @@ -625,7 +625,7 @@ def create_or_update_alias( .. admonition:: Example: - .. literalinclude:: ../samples/sample_index_alias_crud_operations.py + .. literalinclude:: ../samples/sample_index_alias_crud.py :start-after: [START update_alias] :end-before: [END update_alias] :language: python diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py index b41006ac5158..f3e79eae4898 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_search_indexer_client.py @@ -97,7 +97,7 @@ def create_indexer(self, indexer: SearchIndexer, **kwargs: Any) -> SearchIndexer .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START create_indexer] :end-before: [END create_indexer] :language: python @@ -160,7 +160,7 @@ def get_indexer(self, name: str, **kwargs: Any) -> SearchIndexer: .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START get_indexer] :end-before: [END get_indexer] :language: python @@ -184,9 +184,9 @@ def get_indexers(self, *, select: Optional[List[str]] = None, **kwargs: Any) -> .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py - :start-after: [START list_indexer] - :end-before: [END list_indexer] + .. 
literalinclude:: ../samples/sample_indexer_crud.py + :start-after: [START list_indexers] + :end-before: [END list_indexers] :language: python :dedent: 4 :caption: List all the SearchIndexers @@ -208,9 +208,9 @@ def get_indexer_names(self, **kwargs: Any) -> Sequence[str]: .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py - :start-after: [START list_indexer] - :end-before: [END list_indexer] + .. literalinclude:: ../samples/sample_indexer_crud.py + :start-after: [START list_indexers] + :end-before: [END list_indexers] :language: python :dedent: 4 :caption: List all the SearchIndexers @@ -239,7 +239,7 @@ def delete_indexer( .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START delete_indexer] :end-before: [END delete_indexer] :language: python @@ -264,7 +264,7 @@ def run_indexer(self, name: str, **kwargs: Any) -> None: .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START run_indexer] :end-before: [END run_indexer] :language: python @@ -283,7 +283,7 @@ def reset_indexer(self, name: str, **kwargs: Any) -> None: .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START reset_indexer] :end-before: [END reset_indexer] :language: python @@ -363,7 +363,7 @@ def get_indexer_status(self, name: str, **kwargs: Any) -> SearchIndexerStatus: .. admonition:: Example: - .. literalinclude:: ../samples/sample_indexers_operations.py + .. literalinclude:: ../samples/sample_indexer_crud.py :start-after: [START get_indexer_status] :end-before: [END get_indexer_status] :language: python @@ -386,7 +386,7 @@ def create_data_source_connection( .. admonition:: Example: - .. 
literalinclude:: ../samples/sample_data_source_operations.py + .. literalinclude:: ../samples/sample_indexer_datasource_crud.py :start-after: [START create_data_source_connection] :end-before: [END create_data_source_connection] :language: python @@ -447,7 +447,7 @@ def get_data_source_connection(self, name: str, **kwargs: Any) -> SearchIndexerD .. admonition:: Example: - .. literalinclude:: ../samples/sample_data_source_operations.py + .. literalinclude:: ../samples/sample_indexer_datasource_crud.py :start-after: [START get_data_source_connection] :end-before: [END get_data_source_connection] :language: python @@ -474,9 +474,9 @@ def get_data_source_connections( .. admonition:: Example: - .. literalinclude:: ../samples/sample_data_source_operations.py - :start-after: [START list_data_source_connection] - :end-before: [END list_data_source_connection] + .. literalinclude:: ../samples/sample_indexer_datasource_crud.py + :start-after: [START list_data_source_connections] + :end-before: [END list_data_source_connections] :language: python :dedent: 4 :caption: List all the SearchIndexerDataSourceConnections @@ -524,7 +524,7 @@ def delete_data_source_connection( .. admonition:: Example: - .. literalinclude:: ../samples/sample_data_source_operations.py + .. literalinclude:: ../samples/sample_indexer_datasource_crud.py :start-after: [START delete_data_source_connection] :end-before: [END delete_data_source_connection] :language: python diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_index_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_index_client.py index e33e978a6fd6..d3ab23909374 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_index_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_index_client.py @@ -148,7 +148,7 @@ async def get_index(self, name: str, **kwargs: Any) -> SearchIndex: .. 
admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_crud_async.py :start-after: [START get_index_async] :end-before: [END get_index_async] :language: python @@ -193,7 +193,7 @@ async def delete_index( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_crud_async.py :start-after: [START delete_index_async] :end-before: [END delete_index_async] :language: python @@ -221,7 +221,7 @@ async def create_index(self, index: SearchIndex, **kwargs: Any) -> SearchIndex: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_crud_async.py :start-after: [START create_index_async] :end-before: [END create_index_async] :language: python @@ -264,7 +264,7 @@ async def create_or_update_index( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_crud_async.py :start-after: [START update_index_async] :end-before: [END update_index_async] :language: python @@ -299,7 +299,7 @@ async def analyze_text(self, index_name: str, analyze_request: AnalyzeTextOption .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_analyze_text_async.py + .. literalinclude:: ../samples/async_samples/sample_index_analyze_text_async.py :start-after: [START simple_analyze_text_async] :end-before: [END simple_analyze_text_async] :language: python @@ -328,7 +328,7 @@ async def get_synonym_maps(self, *, select: Optional[List[str]] = None, **kwargs .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_synonym_map_operations_async.py + .. 
literalinclude:: ../samples/async_samples/sample_index_synonym_map_crud_async.py :start-after: [START get_synonym_maps_async] :end-before: [END get_synonym_maps_async] :language: python @@ -370,7 +370,7 @@ async def get_synonym_map(self, name: str, **kwargs: Any) -> SynonymMap: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_synonym_map_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_synonym_map_crud_async.py :start-after: [START get_synonym_map_async] :end-before: [END get_synonym_map_async] :language: python @@ -401,7 +401,7 @@ async def delete_synonym_map( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_synonym_map_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_synonym_map_crud_async.py :start-after: [START delete_synonym_map_async] :end-before: [END delete_synonym_map_async] :language: python @@ -429,7 +429,7 @@ async def create_synonym_map(self, synonym_map: SynonymMap, **kwargs: Any) -> Sy .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_synonym_map_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_synonym_map_crud_async.py :start-after: [START create_synonym_map_async] :end-before: [END create_synonym_map_async] :language: python @@ -560,7 +560,7 @@ async def delete_alias( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_alias_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_alias_crud_async.py :start-after: [START delete_alias_async] :end-before: [END delete_alias_async] :language: python @@ -588,7 +588,7 @@ async def create_alias(self, alias: SearchAlias, **kwargs: Any) -> SearchAlias: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_alias_crud_operations_async.py + .. 
literalinclude:: ../samples/async_samples/sample_index_alias_crud_async.py :start-after: [START create_alias_async] :end-before: [END create_alias_async] :language: python @@ -619,7 +619,7 @@ async def create_or_update_alias( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_index_alias_crud_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_index_alias_crud_async.py :start-after: [START update_alias_async] :end-before: [END update_alias_async] :language: python diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_indexer_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_indexer_client.py index 97d0505db5af..b984ba1c8ce8 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_indexer_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/aio/_search_indexer_client.py @@ -92,7 +92,7 @@ async def create_indexer(self, indexer: SearchIndexer, **kwargs: Any) -> SearchI .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START create_indexer_async] :end-before: [END create_indexer_async] :language: python @@ -155,7 +155,7 @@ async def get_indexer(self, name: str, **kwargs: Any) -> SearchIndexer: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START get_indexer_async] :end-before: [END get_indexer_async] :language: python @@ -179,9 +179,9 @@ async def get_indexers(self, *, select: Optional[List[str]] = None, **kwargs) -> .. admonition:: Example: - .. 
literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py - :start-after: [START list_indexer_async] - :end-before: [END list_indexer_async] + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py + :start-after: [START list_indexers_async] + :end-before: [END list_indexers_async] :language: python :dedent: 4 :caption: List all the SearchIndexers @@ -225,7 +225,7 @@ async def delete_indexer( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START delete_indexer_async] :end-before: [END delete_indexer_async] :language: python @@ -250,7 +250,7 @@ async def run_indexer(self, name: str, **kwargs: Any) -> None: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START run_indexer_async] :end-before: [END run_indexer_async] :language: python @@ -269,7 +269,7 @@ async def reset_indexer(self, name: str, **kwargs: Any) -> None: .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START reset_indexer_async] :end-before: [END reset_indexer_async] :language: python @@ -351,7 +351,7 @@ async def get_indexer_status(self, name: str, **kwargs: Any) -> SearchIndexerSta .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_indexers_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_crud_async.py :start-after: [START get_indexer_status_async] :end-before: [END get_indexer_status_async] :language: python @@ -374,7 +374,7 @@ async def create_data_source_connection( .. admonition:: Example: - .. 
literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_datasource_crud_async.py :start-after: [START create_data_source_connection_async] :end-before: [END create_data_source_connection_async] :language: python @@ -445,7 +445,7 @@ async def delete_data_source_connection( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_datasource_crud_async.py :start-after: [START delete_data_source_connection_async] :end-before: [END delete_data_source_connection_async] :language: python @@ -481,7 +481,7 @@ async def get_data_source_connection( .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py + .. literalinclude:: ../samples/async_samples/sample_indexer_datasource_crud_async.py :start-after: [START get_data_source_connection_async] :end-before: [END get_data_source_connection_async] :language: python @@ -504,9 +504,9 @@ async def get_data_source_connections(self, **kwargs: Any) -> Sequence[SearchInd .. admonition:: Example: - .. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py - :start-after: [START list_data_source_connection_async] - :end-before: [END list_data_source_connection_async] + .. 
literalinclude:: ../samples/async_samples/sample_indexer_datasource_crud_async.py + :start-after: [START list_data_source_connections_async] + :end-before: [END list_data_source_connections_async] :language: python :dedent: 4 :caption: List all SearchIndexerDataSourceConnections diff --git a/sdk/search/azure-search-documents/samples/README.md b/sdk/search/azure-search-documents/samples/README.md index 9a658a5cd783..136a99218440 100644 --- a/sdk/search/azure-search-documents/samples/README.md +++ b/sdk/search/azure-search-documents/samples/README.md @@ -7,67 +7,65 @@ products: - azure-search --- -# Samples for Azure Cognitive Search client library for Python +# Azure AI Search Client Library Samples for Python -These code samples show common scenario operations with the Azure Cognitive -Search client library. +These samples demonstrate common scenarios and operations using the Azure AI Search client library. -Authenticate the client with a Azure Cognitive Search [API Key Credential](https://learn.microsoft.com/azure/search/search-security-api-keys): - -[https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/search/azure-search-documents/samples/sample_authentication.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py)) - -Then for common search index operations: - -* Get a document by key: [sample_get_document.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_get_document.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_get_document_async.py)) - -* Perform a simple text query: 
[sample_simple_query.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_simple_query.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_simple_query_async.py)) - -* Perform a filtered query: [sample_filter_query.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_filter_query.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_filter_query_async.py)) - -* Perform a faceted query: [sample_facet_query.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_facet_query.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_facet_query_async.py)) - -* Get auto-completions: [sample_autocomplete.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_autocomplete.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_autocomplete_async.py)) - -* Get search suggestions: [sample_suggestions.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_suggestions.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_suggestions_async.py)) +## Prerequisites -* Perform basic document updates: [sample_crud_operations.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_crud_operations.py) ([async 
version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_crud_operations_async.py)) +* Python 3.10 or later +* An [Azure subscription](https://azure.microsoft.com/free/) +* An Azure AI Search service +* An index named `hotels-sample-index` created using the 'Import data' wizard with the 'hotels-sample' data source. See [Quickstart: Create a search index in the Azure portal](https://learn.microsoft.com/azure/search/search-get-started-portal?pivots=import-data). +* An Azure Storage account and a blob container named `hotels-sample-container` -* CRUD operations for index: [sample_index_crud_operations.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_crud_operations.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py)) +### Install the package +```bash +pip install azure-search-documents +``` -* Analyze text: [sample_analyze_text.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_analyze_text.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_analyze_text_async.py)) +## Examples -* CRUD operations for indexers: [sample_indexers_operations.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_indexers_operations.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_indexers_operations_async.py)) +### Authentication -* General workflow of indexer, datasource and index: 
[sample_indexer_datasource_skillset.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py) +* Authenticate the client with an API Key: [sample_authentication.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py)) -* Semantic search: [sample_semantic_search.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_semantic_search.py) +### Document Operations -* Vector search: [sample_vector_search.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_vector_search.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_vector_search_async.py)) +* Upload, merge, get, and delete documents: [sample_documents_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_documents_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_documents_crud_async.py)) +* High-throughput indexing with buffering: [sample_documents_buffered_sender.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_documents_buffered_sender.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_documents_buffered_sender_async.py)) -## Prerequisites +### Query Operations -* Python 3.8 or later is required to use this package -* You must have an [Azure subscription](https://azure.microsoft.com/free/) -* You must create 
the "Hotels" sample index [in the Azure Portal](https://learn.microsoft.com/azure/search/search-get-started-portal) +* Simple text search: [sample_query_simple.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_simple.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_simple_async.py)) +* Filter and sort search results: [sample_query_filter.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_filter.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_filter_async.py)) +* Faceted search: [sample_query_facets.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_facets.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_facets_async.py)) +* Autocomplete suggestions: [sample_query_autocomplete.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_autocomplete.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_autocomplete_async.py)) +* Search suggestions: [sample_query_suggestions.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_suggestions.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_suggestions_async.py)) +* Semantic search: [sample_query_semantic.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_semantic.py) ([async 
version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_semantic_async.py)) +* Vector search: [sample_query_vector.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_vector.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_vector_async.py)) +* Session consistency: [sample_query_session.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_query_session.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_query_session_async.py)) -## Setup +### Index Operations -1. Install the Azure Cognitive Search client library for Python with [pip](https://pypi.org/project/pip/): +* Create, get, update, and delete indexes: [sample_index_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_async.py)) +* Analyze text: [sample_index_analyze_text.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_analyze_text.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_analyze_text_async.py)) +* Index aliases: [sample_index_alias_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_alias_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_async.py)) +* Synonym maps: 
[sample_index_synonym_map_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_synonym_map_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_synonym_map_crud_async.py)) - ```bash - pip install azure-search-documents --pre - ``` +### Indexer Operations -2. Clone or download [this repository](https://github.com/Azure/azure-sdk-for-python) -3. Open this sample folder in [Visual Studio Code](https://code.visualstudio.com) or your IDE of choice. +* Create, get, update, and delete indexers: [sample_indexer_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_indexer_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_crud_async.py)) +* Create, get, update, and delete data sources: [sample_indexer_datasource_crud.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_indexer_datasource_crud.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_datasource_crud_async.py)) +* Indexer workflow (DataSource, Index, Skillset, Indexer): [sample_indexer_workflow.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_indexer_workflow.py) -## Running the samples +### Advanced -1. Open a terminal window and `cd` to the directory that the samples are saved in. -2. Set the environment variables specified in the sample file you wish to run. -3. Follow the usage described in the file, e.g. 
`python sample_simple_query.py` +* Custom HTTP requests (SearchClient): [sample_search_client_custom_request.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_search_client_custom_request.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_custom_request_async.py)) +* Custom HTTP requests (SearchIndexClient): [sample_index_client_custom_request.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_index_client_custom_request.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_custom_request_async.py)) +* Knowledge base agentic retrieval: [sample_agentic_retrieval.py](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/sample_agentic_retrieval.py) ([async version](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/search/azure-search-documents/samples/async_samples/sample_agentic_retrieval_async.py)) ## Next steps -Check out the [API reference documentation](https://learn.microsoft.com/rest/api/searchservice/) -to learn more about what you can do with the Azure Cognitive Search client library. +Check out the [Azure AI Search REST API reference](https://learn.microsoft.com/rest/api/searchservice/) +to learn more about what you can do with the Azure AI Search client library. 
diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_agentic_retrieval_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_agentic_retrieval_async.py new file mode 100644 index 000000000000..16b76d380a48 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_agentic_retrieval_async.py @@ -0,0 +1,221 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates Knowledge Source and Knowledge Base CRUD operations and + a minimal retrieval query using a semantic intent. + +USAGE: + python sample_agentic_retrieval_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import asyncio +import json +import os + +from azure.core.credentials import AzureKeyCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.search.documents.indexes.aio import SearchIndexClient +from azure.search.documents.knowledgebases.aio import KnowledgeBaseRetrievalClient +from azure.search.documents.knowledgebases.models import ( + KnowledgeBaseRetrievalRequest, + KnowledgeRetrievalSemanticIntent, +) +from azure.search.documents.indexes.models import ( + KnowledgeBase, + KnowledgeSourceReference, + KnowledgeRetrievalMinimalReasoningEffort, + SearchIndexFieldReference, + SearchIndexKnowledgeSource, + SearchIndexKnowledgeSourceParameters, +) + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = 
os.environ["AZURE_SEARCH_API_KEY"] + +knowledge_source_name = "hotels-sample-knowledge-source" +knowledge_base_name = "hotels-sample-knowledge-base" + + +async def create_knowledge_source_async(): + # [START create_knowledge_source_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_source = SearchIndexKnowledgeSource( + name=knowledge_source_name, + search_index_parameters=SearchIndexKnowledgeSourceParameters( + search_index_name=index_name + ), + ) + + async with index_client: + await index_client.create_or_update_knowledge_source( + knowledge_source=knowledge_source + ) + print(f"Created: knowledge source '{knowledge_source_name}'") + # [END create_knowledge_source_async] + + +async def get_knowledge_source_async(): + # [START get_knowledge_source_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + async with index_client: + knowledge_source = await index_client.get_knowledge_source( + knowledge_source_name + ) + print(f"Retrieved: knowledge source '{knowledge_source.name}'") + # [END get_knowledge_source_async] + + +async def update_knowledge_source_async(): + # [START update_knowledge_source_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_source = SearchIndexKnowledgeSource( + name=knowledge_source_name, + search_index_parameters=SearchIndexKnowledgeSourceParameters( + search_index_name=index_name, + source_data_fields=[ + SearchIndexFieldReference(name="HotelId"), + SearchIndexFieldReference(name="HotelName"), + SearchIndexFieldReference(name="Description"), + SearchIndexFieldReference(name="Category"), + SearchIndexFieldReference(name="Tags"), + ], + ), + ) + + async with index_client: + await index_client.create_or_update_knowledge_source( + knowledge_source=knowledge_source + ) + print(f"Updated: knowledge source '{knowledge_source_name}'") + # [END update_knowledge_source_async] + + +async def 
delete_knowledge_source_async(): + # [START delete_knowledge_source_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + try: + async with index_client: + await index_client.delete_knowledge_source(knowledge_source_name) + print(f"Deleted: knowledge source '{knowledge_source_name}'") + except ResourceNotFoundError: + print(f"Skipped: knowledge source '{knowledge_source_name}' not found") + # [END delete_knowledge_source_async] + + +async def create_knowledge_base_async(): + # [START create_knowledge_base_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_base = KnowledgeBase( + name=knowledge_base_name, + knowledge_sources=[KnowledgeSourceReference(name=knowledge_source_name)], + ) + + async with index_client: + await index_client.create_or_update_knowledge_base(knowledge_base) + print(f"Created: knowledge base '{knowledge_base_name}'") + # [END create_knowledge_base_async] + + +async def get_knowledge_base_async(): + # [START get_knowledge_base_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + async with index_client: + knowledge_base = await index_client.get_knowledge_base(knowledge_base_name) + print(f"Retrieved: knowledge base '{knowledge_base.name}'") + # [END get_knowledge_base_async] + + +async def update_knowledge_base_async(): + # [START update_knowledge_base_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_base = KnowledgeBase( + name=knowledge_base_name, + knowledge_sources=[KnowledgeSourceReference(name=knowledge_source_name)], + retrieval_reasoning_effort=KnowledgeRetrievalMinimalReasoningEffort(), + ) + + async with index_client: + await index_client.create_or_update_knowledge_base(knowledge_base) + print(f"Updated: knowledge base '{knowledge_base_name}'") + # [END update_knowledge_base_async] + + +async def retrieve_knowledge_base_async(): + # [START retrieve_knowledge_base_async] + 
retrieval_client = KnowledgeBaseRetrievalClient( + service_endpoint, + knowledge_base_name=knowledge_base_name, + credential=AzureKeyCredential(key), + ) + + request = KnowledgeBaseRetrievalRequest( + intents=[KnowledgeRetrievalSemanticIntent(search="hotels with free wifi")] + ) + + try: + result = await retrieval_client.retrieve(request) + finally: + await retrieval_client.close() + + print("Results: knowledge base retrieval") + + response_parts = [] + for resp in result.response or []: + for content in resp.content or []: + if hasattr(content, "text"): + response_parts.append(content.text) + + if response_parts: + response_content = "\n\n".join(response_parts) + + items = json.loads(response_content) + for i, item in enumerate(items[:5], start=1): + print(f" Result {i}:") + print(f" Title: {item.get('title')}") + print(f" Content: {item.get('content')}") + else: + print("Results: none") + # [END retrieve_knowledge_base_async] + + +async def delete_knowledge_base_async(): + # [START delete_knowledge_base_async] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + try: + async with index_client: + await index_client.delete_knowledge_base(knowledge_base_name) + print(f"Deleted: knowledge base '{knowledge_base_name}'") + except ResourceNotFoundError: + print(f"Skipped: knowledge base '{knowledge_base_name}' not found") + # [END delete_knowledge_base_async] + + +if __name__ == "__main__": + asyncio.run(create_knowledge_source_async()) + asyncio.run(get_knowledge_source_async()) + asyncio.run(update_knowledge_source_async()) + asyncio.run(create_knowledge_base_async()) + asyncio.run(get_knowledge_base_async()) + asyncio.run(update_knowledge_base_async()) + asyncio.run(retrieve_knowledge_base_async()) + asyncio.run(delete_knowledge_base_async()) + asyncio.run(delete_knowledge_source_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_analyze_text_async.py 
b/sdk/search/azure-search-documents/samples/async_samples/sample_analyze_text_async.py deleted file mode 100644 index a20a4d5cc4af..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_analyze_text_async.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_analyze_text_async.py -DESCRIPTION: - This sample demonstrates how to analyze text. -USAGE: - python sample_analyze_text_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def simple_analyze_text(): - # [START simple_analyze_text_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.indexes.aio import SearchIndexClient - from azure.search.documents.indexes.models import AnalyzeTextOptions - - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - analyze_request = AnalyzeTextOptions(text="One's ", analyzer_name="standard.lucene") - - async with client: - result = await client.analyze_text(index_name, analyze_request) - print(result.as_dict()) - # [END simple_analyze_text_async] - - -if __name__ == "__main__": - asyncio.run(simple_analyze_text()) diff --git 
a/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py index e4340c96274c..a34dde17a4ae 100644 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_authentication_async.py @@ -1,31 +1,30 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_authentication_async.py DESCRIPTION: - This sample demonstrates how to authenticate with the Azure Congnitive Search - service with an API key. See more details about authentication here: - https://learn.microsoft.com/azure.search.documents/search-security-api-keys + Demonstrates how to authenticate with the Azure AI Search service. + USAGE: python sample_authentication_async.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service """ -import asyncio import os +import asyncio -async def authentication_with_api_key_credential_async(): - # [START create_search_client_with_key_async] +async def authenticate_search_client_with_api_key_async(): + # [START authenticate_search_client_with_api_key_async] from azure.core.credentials import AzureKeyCredential from azure.search.documents.aio import SearchClient @@ -34,28 +33,33 @@ async def authentication_with_api_key_credential_async(): key = os.environ["AZURE_SEARCH_API_KEY"] search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - # [END create_search_client_with_key_async] + # [END authenticate_search_client_with_api_key_async] async with search_client: - result = await search_client.get_document_count() + document_count = await search_client.get_document_count() - print("There are {} documents in the {} search index.".format(result, index_name)) + print(f"Document count: {document_count} (index '{index_name}')") -async def authentication_service_client_with_api_key_credential_async(): - # [START create_search_service_with_key_async] +async def authenticate_index_client_with_api_key_async(): + # [START authenticate_index_client_with_api_key_async] from azure.core.credentials import AzureKeyCredential from azure.search.documents.indexes.aio import SearchIndexClient service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] key = os.environ["AZURE_SEARCH_API_KEY"] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - # [END create_search_service_with_key_async] + search_index_client = 
SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + # [END authenticate_index_client_with_api_key_async] + + async with search_index_client: + result = search_index_client.list_indexes() + names = [x.name async for x in result] + print(f"Indexes ({len(names)}): {', '.join(names)}") -async def authentication_with_aad(): - # [START authentication_with_aad] +async def authenticate_search_client_with_aad_async(): + # [START authenticate_search_client_with_aad_async] from azure.identity.aio import DefaultAzureCredential from azure.search.documents.aio import SearchClient @@ -64,28 +68,33 @@ async def authentication_with_aad(): credential = DefaultAzureCredential() search_client = SearchClient(service_endpoint, index_name, credential) - # [END authentication_with_aad] + # [END authenticate_search_client_with_aad_async] async with search_client: - result = await search_client.get_document_count() + document_count = await search_client.get_document_count() - print("There are {} documents in the {} search index.".format(result, index_name)) + print(f"Document count: {document_count} (index '{index_name}')") -async def authentication_service_client_with_aad(): - # [START authentication_service_client_with_aad] +async def authenticate_index_client_with_aad_async(): + # [START authenticate_index_client_with_aad_async] from azure.identity.aio import DefaultAzureCredential from azure.search.documents.indexes.aio import SearchIndexClient service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] credential = DefaultAzureCredential() - client = SearchIndexClient(service_endpoint, credential) - # [END authentication_service_client_with_aad] + search_index_client = SearchIndexClient(service_endpoint, credential) + # [END authenticate_index_client_with_aad_async] + + async with search_index_client: + result = search_index_client.list_indexes() + names = [x.name async for x in result] + print(f"Indexes ({len(names)}): {', '.join(names)}") if __name__ == "__main__": - 
asyncio.run(authentication_with_api_key_credential_async()) - asyncio.run(authentication_service_client_with_api_key_credential_async()) - asyncio.run(authentication_with_aad()) - asyncio.run(authentication_service_client_with_aad()) + asyncio.run(authenticate_search_client_with_api_key_async()) + asyncio.run(authenticate_index_client_with_api_key_async()) + asyncio.run(authenticate_search_client_with_aad_async()) + asyncio.run(authenticate_index_client_with_aad_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_autocomplete_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_autocomplete_async.py deleted file mode 100644 index 856652212a40..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_autocomplete_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_autocomplete_async.py -DESCRIPTION: - This sample demonstrates how to obtain autocompletion suggestions from an - Azure search index. -USAGE: - python sample_autocomplete_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def autocomplete_query(): - # [START autocomplete_query_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - async with search_client: - results = await search_client.autocomplete(search_text="bo", suggester_name="sg") - - print("Autocomplete suggestions for 'bo'") - for result in results: - print(" Completion: {}".format(result["text"])) - # [END autocomplete_query_async] - - -if __name__ == "__main__": - asyncio.run(autocomplete_query()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_buffered_sender_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_buffered_sender_async.py deleted file mode 100644 index 23b9aeae146c..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_buffered_sender_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_batch_client_async.py -DESCRIPTION: - This sample demonstrates how to upload, merge, or delete documents using SearchIndexingBufferedSender. 
-USAGE: - python sample_batch_client_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.aio import SearchIndexingBufferedSender - - -async def sample_batching_client(): - DOCUMENT = { - "category": "Hotel", - "hotelId": "1000", - "rating": 4.0, - "rooms": [], - "hotelName": "Azure Inn", - } - - async with SearchIndexingBufferedSender(service_endpoint, index_name, AzureKeyCredential(key)) as batch_client: - # add upload actions - await batch_client.upload_documents(documents=[DOCUMENT]) - # add merge actions - await batch_client.merge_documents(documents=[{"hotelId": "1000", "rating": 4.5}]) - # add delete actions - await batch_client.delete_documents(documents=[{"hotelId": "1000"}]) - - -async def main(): - await sample_batching_client() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_crud_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_crud_operations_async.py deleted file mode 100644 index 697d8d03ae10..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_crud_operations_async.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -""" -FILE: sample_crud_operations_async.py -DESCRIPTION: - This sample demonstrates how to upload, merge, or delete documents from an - Azure Search index. -USAGE: - python sample_crud_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.aio import SearchClient - -search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - -async def upload_document(): - # [START upload_document_async] - DOCUMENT = { - "hotelId": "1000", - "hotelName": "Azure Inn", - } - - result = await search_client.upload_documents(documents=[DOCUMENT]) - - print("Upload of new document succeeded: {}".format(result[0].succeeded)) - # [END upload_document_async] - - -async def merge_document(): - # [START merge_document_async] - result = await search_client.upload_documents(documents=[{"hotelId": "783", "hotelName": "Renovated Ranch"}]) - - print("Merge into new document succeeded: {}".format(result[0].succeeded)) - # [END merge_document_async] - - -async def delete_document(): - # [START delete_document_async] - result = await search_client.upload_documents(documents=[{"hotelId": "1000"}]) - - print("Delete new document succeeded: {}".format(result[0].succeeded)) - # [END delete_document_async] - - -async def main(): - await upload_document() - await merge_document() - await delete_document() - await search_client.close() - - -if __name__ == 
"__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_data_source_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_data_source_operations_async.py deleted file mode 100644 index fd39b24abbe8..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_data_source_operations_async.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_data_source_operations_async.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Data Source. -USAGE: - python sample_data_source_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import asyncio -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] -connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.models import SearchIndexerDataContainer, SearchIndexerDataSourceConnection -from azure.search.documents.indexes.aio import SearchIndexerClient - -client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - - -async def create_data_source_connection(): - # [START create_data_source_connection_async] - container = SearchIndexerDataContainer(name="searchcontainer") - data_source = SearchIndexerDataSourceConnection( - name="async-sample-data-source-connection", - type="azureblob", - 
connection_string=connection_string, - container=container, - ) - result = await client.create_data_source_connection(data_source) - print("Create new Data Source Connection - async-sample-data-source-connection") - # [END create_data_source_connection_async] - - -async def list_data_source_connections(): - # [START list_data_source_connection_async] - result = await client.get_data_source_connections() - names = [x.name for x in result] - print("Found {} Data Source Connections in the service: {}".format(len(result), ", ".join(names))) - # [END list_data_source_connection_async] - - -async def get_data_source_connection(): - # [START get_data_source_connection_async] - result = await client.get_data_source_connection("async-sample-data-source-connection") - print("Retrived Data Source Connection 'async-sample-data-source-connection'") - return result - # [END get_data_source_connection_async] - - -async def delete_data_source_connection(): - # [START delete_data_source_connection_async] - await client.delete_data_source_connection("async-sample-data-source-connection") - print("Data Source Connection 'async-sample-data-source-connection' successfully deleted") - # [END delete_data_source_connection_async] - - -async def main(): - await create_data_source_connection() - await list_data_source_connections() - await get_data_source_connection() - await delete_data_source_connection() - await client.close() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_documents_buffered_sender_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_documents_buffered_sender_async.py new file mode 100644 index 000000000000..8ff623d4df17 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_documents_buffered_sender_async.py @@ -0,0 +1,76 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to use the SearchIndexingBufferedSender for high-throughput indexing. + +USAGE: + python sample_documents_buffered_sender_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def sample_batching_client_async(): + # [START sample_batching_client_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchIndexingBufferedSender + + document = { + "HotelId": "100", + "HotelName": "Azure Sanctuary", + "Description": "A quiet retreat offering understated elegance and premium amenities.", + "Description_fr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe.", + "Category": "Luxury", + "Tags": [ + "pool", + "view", + "wifi", + "concierge", + "private beach", + "gourmet dining", + "spa", + ], + "ParkingIncluded": False, + "LastRenovationDate": "2024-01-15T00:00:00+00:00", + "Rating": 5, + "Location": {"type": "Point", "coordinates": [-122.131577, 47.678581]}, + } + + async with SearchIndexingBufferedSender( + service_endpoint, index_name, AzureKeyCredential(key) + ) as buffered_sender: + # add upload actions + await buffered_sender.upload_documents(documents=[document]) + print(f"Uploaded: document {document['HotelId']}") + + # add merge actions + await buffered_sender.merge_documents( + documents=[{"HotelId": "100", "Rating": 4.5}] + ) + 
print(f"Merged: document {document['HotelId']}") + + # add delete actions + await buffered_sender.delete_documents(documents=[{"HotelId": "100"}]) + print(f"Deleted: document {document['HotelId']}") + # [END sample_batching_client_async] + + +if __name__ == "__main__": + asyncio.run(sample_batching_client_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_documents_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_documents_crud_async.py new file mode 100644 index 000000000000..39d9db3e3d14 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_documents_crud_async.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to upload, merge, get, and delete documents. 
+ +USAGE: + python sample_documents_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def upload_document_async(): + # [START upload_document_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + document = { + "HotelId": "100", + "HotelName": "Azure Sanctuary", + "Description": "A quiet retreat offering understated elegance and premium amenities.", + "Description_fr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe.", + "Category": "Luxury", + "Tags": [ + "pool", + "view", + "wifi", + "concierge", + "private beach", + "gourmet dining", + "spa", + ], + "ParkingIncluded": False, + "LastRenovationDate": "2024-01-15T00:00:00+00:00", + "Rating": 5, + "Location": {"type": "Point", "coordinates": [-122.131577, 47.678581]}, + } + + async with search_client: + result = await search_client.upload_documents(documents=[document]) + + print(f"Uploaded: document 100 (succeeded={result[0].succeeded})") + # [END upload_document_async] + + +async def merge_document_async(): + # [START merge_document_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + result = await search_client.merge_documents( + documents=[{"HotelId": "100", 
"HotelName": "Azure Sanctuary & Spa"}] + ) + + print(f"Merged: document 100 (succeeded={result[0].succeeded})") + # [END merge_document_async] + + +async def get_document_async(): + # [START get_document_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + result = await search_client.get_document(key="100") + + print("Result:") + print(f" HotelId: 100") + print(f" HotelName: {result['HotelName']}") + # [END get_document_async] + + +async def delete_document_async(): + # [START delete_document_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + result = await search_client.delete_documents(documents=[{"HotelId": "100"}]) + + print(f"Deleted: document 100 (succeeded={result[0].succeeded})") + # [END delete_document_async] + + +if __name__ == "__main__": + asyncio.run(upload_document_async()) + asyncio.run(merge_document_async()) + asyncio.run(get_document_async()) + asyncio.run(delete_document_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_facet_query_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_facet_query_async.py deleted file mode 100644 index 1ce8666eabbd..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_facet_query_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -""" -FILE: sample_facet_query_async.py -DESCRIPTION: - This sample demonstrates how to obtain search facets on specified field in - an Azure Search index. -USAGE: - python sample_facet_query_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -from typing import List, Dict, cast -import os -import asyncio - - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def filter_query(): - # [START facet_query_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - async with search_client: - results = await search_client.search(search_text="WiFi", facets=["category,count:3", "parkingIncluded"]) - - facets: Dict[str, List[str]] = cast(Dict[str, List[str]], await results.get_facets()) - - print("Catgory facet counts for hotels:") - for facet in facets["category"]: - print(" {}".format(facet)) - # [END facet_query_async] - - -if __name__ == "__main__": - asyncio.run(filter_query()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_filter_query_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_filter_query_async.py deleted file mode 100644 index e8dc44bda617..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_filter_query_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft 
Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_filter_query_async.py -DESCRIPTION: - This sample demonstrates how search results from an Azure Search index can - be filtered and ordered. -USAGE: - python sample_filter_query_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def filter_query(): - # [START filter_query_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - async with search_client: - results = await search_client.search( - search_text="WiFi", - filter="Address/StateProvince eq 'FL' and Address/Country eq 'USA'", - select=["hotelName", "rating"], - order_by=["rating desc"], - ) - - print("Florida hotels containing 'WiFi', sorted by Rating:") - async for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) - # [END filter_query_async] - - -if __name__ == "__main__": - asyncio.run(filter_query()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_get_document_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_get_document_async.py deleted file mode 100644 index c9b177bef4eb..000000000000 --- 
a/sdk/search/azure-search-documents/samples/async_samples/sample_get_document_async.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_get_document_async.py -DESCRIPTION: - This sample demonstrates how to retrieve a specific document by key from an - Azure Search index. -USAGE: - python sample_get_document_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def autocomplete_query(): - # [START get_document_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - async with search_client: - result = await search_client.get_document(key="23") - - print("Details for hotel '23' are:") - print(" Name: {}".format(result["hotelName"])) - # [END get_document_async] - - -if __name__ == "__main__": - asyncio.run(autocomplete_query()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_async.py new file mode 100644 index 000000000000..7206721ffb9d --- /dev/null +++ 
b/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_async.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete an index alias. + +USAGE: + python sample_index_alias_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") +""" + + +import asyncio +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] +alias_name = "hotel-alias" +new_index_name = "hotels-sample-index-v2" + + +async def create_alias_async(): + # [START create_alias_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import SearchAlias + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + alias = SearchAlias(name=alias_name, indexes=[index_name]) + async with index_client: + result = await index_client.create_alias(alias) + print(f"Created: alias '{result.name}' -> index '{index_name}'") + # [END create_alias_async] + + +async def get_alias_async(): + # [START get_alias_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + async with index_client: + result = await 
index_client.get_alias(alias_name) + print(f"Retrieved: alias '{result.name}'") + # [END get_alias_async] + + +async def update_alias_async(): + # [START update_alias_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + ScoringProfile, + SearchAlias, + SearchIndex, + SimpleField, + SearchableField, + SearchFieldDataType, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + # Create a new index with a different schema or settings + # In a real scenario, this would be your updated index version (e.g., v2) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + SearchableField(name="HotelName", type=SearchFieldDataType.String), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + SimpleField(name="State", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profile = ScoringProfile(name="MyProfile") + index = SearchIndex( + name=new_index_name, + fields=fields, + scoring_profiles=[scoring_profile], + cors_options=cors_options, + ) + + async with index_client: + await index_client.create_or_update_index(index=index) + print(f"Created: index '{new_index_name}'") + + # Update the alias to point to the new index + # This operation is atomic and ensures zero downtime for applications using the alias + alias = SearchAlias(name=alias_name, indexes=[new_index_name]) + result = await index_client.create_or_update_alias(alias) + print(f"Updated: alias '{result.name}' -> 
index '{new_index_name}'") + # [END update_alias_async] + + +async def delete_alias_async(): + # [START delete_alias_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.core.exceptions import ResourceNotFoundError + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + async with index_client: + await index_client.delete_alias(alias_name) + print(f"Deleted: alias '{alias_name}'") + + try: + await index_client.delete_index(new_index_name) + print(f"Deleted: index '{new_index_name}'") + except ResourceNotFoundError: + print(f"Skipped: index '{new_index_name}' not found") + # [END delete_alias_async] + + +if __name__ == "__main__": + asyncio.run(create_alias_async()) + asyncio.run(get_alias_async()) + asyncio.run(update_alias_async()) + asyncio.run(delete_alias_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_operations_async.py deleted file mode 100644 index 3a47a8fb8641..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_index_alias_crud_operations_async.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_alias_crud_operations_async.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete an alias with an existing index. 
-USAGE: - python sample_index_alias_crud_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key - 3) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") -""" - - -import asyncio -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] -alias_name = "motels" - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.aio import SearchIndexClient -from azure.search.documents.indexes.models import ( - ComplexField, - CorsOptions, - ScoringProfile, - SearchAlias, - SearchIndex, - SimpleField, - SearchableField, - SearchFieldDataType, -) - - -client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - -async def create_alias(): - # [START create_alias_async] - alias = SearchAlias(name=alias_name, indexes=[index_name]) - result = await client.create_alias(alias) - # [END create_alias_async] - - -async def get_alias(): - # [START get_alias_async] - result = await client.get_alias(alias_name) - # [END get_alias_async] - - -async def update_alias(): - # [START update_alias_async] - new_index_name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - SearchableField(name="hotelName", type=SearchFieldDataType.String), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - SimpleField(name="state", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - cors_options = 
CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profile = ScoringProfile(name="MyProfile") - scoring_profiles = [] - scoring_profiles.append(scoring_profile) - index = SearchIndex( - name=new_index_name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options - ) - - result_index = await client.create_or_update_index(index=index) - - alias = SearchAlias(name=alias_name, indexes=[new_index_name]) - result = await client.create_or_update_alias(alias) - - # [END update_alias_async] - - -async def delete_alias(): - # [START delete_alias_async] - - await client.delete_alias(alias_name) - # [END delete_alias_async] - - -async def main(): - await create_alias() - await get_alias() - await update_alias() - await delete_alias() - await client.close() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_analyze_text_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_analyze_text_async.py new file mode 100644 index 000000000000..e5ca9a20cb9d --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_index_analyze_text_async.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to analyze text using a specific analyzer. 
+ +USAGE: + python sample_index_analyze_text_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def simple_analyze_text_async(): + # [START simple_analyze_text_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import AnalyzeTextOptions + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + analyze_request = AnalyzeTextOptions( + text="One's ", analyzer_name="standard.lucene" + ) + + async with index_client: + analysis_result = await index_client.analyze_text(index_name, analyze_request) + + print("Results:") + for token in analysis_result.tokens: + print( + f" Token: {token.token}, Start: {token.start_offset}, End: {token.end_offset}" + ) + # [END simple_analyze_text_async] + + +if __name__ == "__main__": + asyncio.run(simple_analyze_text_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_custom_request_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_custom_request_async.py new file mode 100644 index 000000000000..a34d5a982e26 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_custom_request_async.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to make custom HTTP requests using SearchIndexClient. + +USAGE: + python sample_index_client_custom_request_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + + +async def sample_send_request_async(): + # [START sample_send_request_async] + import os + import sys + from pathlib import Path + from azure.core.credentials import AzureKeyCredential + from azure.core.rest import HttpRequest + from azure.search.documents.indexes.aio import SearchIndexClient + + sys.path.append(str(Path(__file__).resolve().parents[1])) + from sample_utils import AZURE_SEARCH_API_VERSION + + endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] + index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] + key = os.environ["AZURE_SEARCH_API_KEY"] + + index_client = SearchIndexClient(endpoint, AzureKeyCredential(key)) + + # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, + # while adding convenience for endpoint construction. 
+ request = HttpRequest( + method="GET", + url=f"/indexes('{index_name}')?api-version={AZURE_SEARCH_API_VERSION}", + ) + async with index_client: + response = await index_client.send_request(request) + response.raise_for_status() + response_body = response.json() + print(f"Response: {response_body}") + # [END sample_send_request_async] + + +if __name__ == "__main__": + import asyncio + + asyncio.run(sample_send_request_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_send_request_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_send_request_async.py deleted file mode 100644 index cd767d24f621..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_index_client_send_request_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_client_send_request_async.py - -DESCRIPTION: - This sample demonstrates how to make custom HTTP requests through a client pipeline. - -USAGE: - python sample_index_client_send_request_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import asyncio -import os -from azure.core.credentials import AzureKeyCredential -from azure.core.rest import HttpRequest -from azure.search.documents.indexes.aio import SearchIndexClient - - -async def sample_send_request(): - endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - client = SearchIndexClient(endpoint, AzureKeyCredential(key)) - - # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, - # while adding convenience for endpoint construction. - request = HttpRequest(method="GET", url=f"/indexes('{index_name}')?api-version=2024-05-01-preview") - async with client: - response = await client.send_request(request) - response.raise_for_status() - response_body = response.json() - print(response_body) - - -if __name__ == "__main__": - asyncio.run(sample_send_request()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_async.py new file mode 100644 index 000000000000..f0bab5497144 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_async.py @@ -0,0 +1,156 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a search index. 
+ +USAGE: + python sample_index_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + + +import os +import asyncio +from typing import List + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +index_name = "hotels-sample-index-index-crud" + + +async def create_index_async(): + # [START create_index_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + SearchIndex, + ScoringProfile, + SearchFieldDataType, + SimpleField, + SearchableField, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="HotelName", type=SearchFieldDataType.String, searchable=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profiles: List[ScoringProfile] = [] + index = SearchIndex( + name=index_name, + fields=fields, + scoring_profiles=scoring_profiles, + cors_options=cors_options, + ) + + async with index_client: + result = await index_client.create_index(index) + print(f"Created: index '{result.name}'") + # [END create_index_async] + + +async def get_index_async(): + # [START get_index_async] + from azure.core.credentials 
import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + result = await index_client.get_index(index_name) + print(f"Retrieved: index '{result.name}'") + # [END get_index_async] + + +async def update_index_async(): + # [START update_index_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + SearchIndex, + ScoringProfile, + SearchFieldDataType, + SimpleField, + SearchableField, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="HotelName", type=SearchFieldDataType.String, searchable=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + SimpleField(name="State", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profile = ScoringProfile(name="MyProfile") + scoring_profiles = [] + scoring_profiles.append(scoring_profile) + index = SearchIndex( + name=index_name, + fields=fields, + scoring_profiles=scoring_profiles, + cors_options=cors_options, + ) + + async with index_client: + result = await index_client.create_or_update_index(index=index) + print(f"Updated: index '{result.name}'") + # [END update_index_async] + + +async def delete_index_async(): + # [START delete_index_async] + from azure.core.credentials import AzureKeyCredential + from 
azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + await index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + # [END delete_index_async] + + +if __name__ == "__main__": + asyncio.run(create_index_async()) + asyncio.run(get_index_async()) + asyncio.run(update_index_async()) + asyncio.run(delete_index_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py deleted file mode 100644 index 8431867adfb0..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_crud_operations_async.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete an index. 
-USAGE: - python sample_index_crud_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - - -import os -import asyncio -from typing import List - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.aio import SearchIndexClient -from azure.search.documents.indexes.models import ( - ComplexField, - CorsOptions, - SearchIndex, - ScoringProfile, - SearchFieldDataType, - SimpleField, - SearchableField, -) - - -async def create_index(): - # [START create_index_async] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="hotelName", type=SearchFieldDataType.String, searchable=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profiles: List[ScoringProfile] = [] - index = SearchIndex(name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - - result = await client.create_index(index) - await client.close() - # [END create_index_async] - - -async def get_index(): - # [START get_index_async] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - result = await client.get_index(name) - await client.close() - # [END get_index_async] - - 
-async def update_index(): - # [START update_index_async] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="hotelName", type=SearchFieldDataType.String, searchable=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - SimpleField(name="state", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profile = ScoringProfile(name="MyProfile") - scoring_profiles = [] - scoring_profiles.append(scoring_profile) - index = SearchIndex(name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - - result = await client.create_or_update_index(index=index) - await client.close() - # [END update_index_async] - - -async def delete_index(): - # [START delete_index_async] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - await client.delete_index(name) - await client.close() - # [END delete_index_async] - - -async def main(): - await create_index() - await get_index() - await update_index() - await delete_index() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_index_synonym_map_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_index_synonym_map_crud_async.py new file mode 100644 index 000000000000..e545d1197520 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_index_synonym_map_crud_async.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a synonym map. +USAGE: + python sample_index_synonym_map_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import asyncio +import os +from pathlib import Path + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] + +map1 = "hotels-sample-synonym-map" +map2 = "hotels-sample-synonym-map-file" +file_path = Path(__file__).resolve().parents[1] / "data" / "synonym_map.txt" + + +async def create_synonym_map_async(name): + # [START create_synonym_map_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import SynonymMap + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + synonyms = [ + "USA, United States, United States of America", + "Washington, Wash. 
=> WA", + ] + synonym_map = SynonymMap(name=name, synonyms=synonyms) + async with index_client: + result = await index_client.create_synonym_map(synonym_map) + print(f"Created: synonym map '{result.name}'") + # [END create_synonym_map_async] + + +async def create_synonym_map_from_file_async(name): + # [START create_synonym_map_from_file_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import SynonymMap + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + with open(file_path, "r") as f: + solr_format_synonyms = f.read() + synonyms = solr_format_synonyms.split("\n") + synonym_map = SynonymMap(name=name, synonyms=synonyms) + async with index_client: + result = await index_client.create_synonym_map(synonym_map) + print(f"Created: synonym map '{result.name}'") + # [END create_synonym_map_from_file_async] + + +async def get_synonym_maps_async(): + # [START get_synonym_maps_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + result = await index_client.get_synonym_maps() + names = [x.name for x in result] + print(f"Synonym maps ({len(result)}): {', '.join(names)}") + # [END get_synonym_maps_async] + + +async def get_synonym_map_async(name): + # [START get_synonym_map_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + result = await index_client.get_synonym_map(name) + print(f"Retrieved: synonym map '{name}'") + if result: + for syn in result.synonyms: + print(f" {syn}") + # [END get_synonym_map_async] + + +async def delete_synonym_map_async(name): + # 
[START delete_synonym_map_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + await index_client.delete_synonym_map(name) + print(f"Deleted: synonym map '{name}'") + # [END delete_synonym_map_async] + + +if __name__ == "__main__": + asyncio.run(create_synonym_map_async(map1)) + asyncio.run(create_synonym_map_from_file_async(map2)) + asyncio.run(get_synonym_maps_async()) + asyncio.run(get_synonym_map_async(map1)) + asyncio.run(delete_synonym_map_async(map1)) + asyncio.run(delete_synonym_map_async(map2)) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_crud_async.py new file mode 100644 index 000000000000..2f338e5e3989 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_crud_async.py @@ -0,0 +1,194 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete an indexer. 
+ +USAGE: + python sample_indexer_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_STORAGE_CONNECTION_STRING - connection string for the Azure Storage account +""" + +import asyncio +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] +container_name = "hotels-sample-container" +index_name = "hotels-sample-index-indexer-crud" +data_source_name = "hotels-sample-blob" +indexer_name = "hotels-sample-indexer-indexer-crud" + + +async def create_indexer_async(): + # [START create_indexer_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import ( + SearchIndexClient, + SearchIndexerClient, + ) + from azure.search.documents.indexes.models import ( + SearchIndexerDataContainer, + SearchIndexerDataSourceConnection, + SearchIndex, + SearchIndexer, + SimpleField, + SearchFieldDataType, + ) + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + # create an index + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + ] + index = SearchIndex(name=index_name, fields=fields) + async with index_client: + await index_client.create_index(index) + + # create a datasource + container = SearchIndexerDataContainer(name=container_name) + data_source_connection = SearchIndexerDataSourceConnection( + name=data_source_name, + type="azureblob", + connection_string=connection_string, + container=container, + ) + async with indexer_client: + await 
indexer_client.create_data_source_connection(data_source_connection) + + # create an indexer + indexer = SearchIndexer( + name=indexer_name, + data_source_name=data_source_name, + target_index_name=index_name, + ) + result = await indexer_client.create_indexer(indexer) + print(f"Created: indexer '{result.name}'") + # [END create_indexer_async] + + +async def list_indexers_async(): + # [START list_indexers_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + result = await indexer_client.get_indexers() + names = [x.name for x in result] + print(f"Indexers ({len(result)}): {', '.join(names)}") + # [END list_indexers_async] + + +async def get_indexer_async(): + # [START get_indexer_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + result = await indexer_client.get_indexer(indexer_name) + print(f"Retrieved: indexer '{result.name}'") + return result + # [END get_indexer_async] + + +async def get_indexer_status_async(): + # [START get_indexer_status_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + result = await indexer_client.get_indexer_status(indexer_name) + print(f"Status: indexer '{indexer_name}' is {result.status}") + return result + # [END get_indexer_status_async] + + +async def run_indexer_async(): + # [START run_indexer_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = 
SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + await indexer_client.run_indexer(indexer_name) + print(f"Ran: indexer '{indexer_name}'") + # [END run_indexer_async] + return + + +async def reset_indexer_async(): + # [START reset_indexer_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + async with indexer_client: + await indexer_client.reset_indexer(indexer_name) + print(f"Reset: indexer '{indexer_name}'") + return + # [END reset_indexer_async] + + +async def delete_indexer_async(): + # [START delete_indexer_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + async with indexer_client: + await indexer_client.delete_indexer(indexer_name) + print(f"Deleted: indexer '{indexer_name}'") + # [END delete_indexer_async] + + +async def delete_data_source_async(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + async with indexer_client: + await indexer_client.delete_data_source_connection(data_source_name) + print(f"Deleted: data source '{data_source_name}'") + + +async def delete_index_async(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + await index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + + +if __name__ == "__main__": + asyncio.run(create_indexer_async()) + asyncio.run(list_indexers_async()) + 
asyncio.run(get_indexer_async()) + asyncio.run(get_indexer_status_async()) + asyncio.run(run_indexer_async()) + asyncio.run(reset_indexer_async()) + asyncio.run(delete_indexer_async()) + asyncio.run(delete_data_source_async()) + asyncio.run(delete_index_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_datasource_crud_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_datasource_crud_async.py new file mode 100644 index 000000000000..4a817dbe7309 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_indexer_datasource_crud_async.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a data source. + +USAGE: + python sample_indexer_datasource_crud_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_STORAGE_CONNECTION_STRING - connection string for the Azure Storage account +""" + +import asyncio +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] +data_source_connection_name = "hotels-sample-blob" +container_name = "hotels-sample-container" + + +async def create_data_source_connection_async(): + # [START create_data_source_connection_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + from azure.search.documents.indexes.models import ( + 
SearchIndexerDataContainer, + SearchIndexerDataSourceConnection, + ) + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + container = SearchIndexerDataContainer(name=container_name) + data_source_connection = SearchIndexerDataSourceConnection( + name=data_source_connection_name, + type="azureblob", + connection_string=connection_string, + container=container, + ) + async with indexer_client: + result = await indexer_client.create_data_source_connection( + data_source_connection + ) + print(f"Created: data source '{result.name}'") + # [END create_data_source_connection_async] + + +async def list_data_source_connections_async(): + # [START list_data_source_connections_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + result = await indexer_client.get_data_source_connections() + names = [ds.name for ds in result] + print(f"Data sources ({len(result)}): {', '.join(names)}") + # [END list_data_source_connections_async] + + +async def get_data_source_connection_async(): + # [START get_data_source_connection_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + async with indexer_client: + result = await indexer_client.get_data_source_connection( + data_source_connection_name + ) + print(f"Retrieved: data source '{result.name}'") + return result + # [END get_data_source_connection_async] + + +async def delete_data_source_connection_async(): + # [START delete_data_source_connection_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, 
AzureKeyCredential(key)) + + async with indexer_client: + await indexer_client.delete_data_source_connection(data_source_connection_name) + print(f"Deleted: data source '{data_source_connection_name}'") + # [END delete_data_source_connection_async] + + +if __name__ == "__main__": + asyncio.run(create_data_source_connection_async()) + asyncio.run(list_data_source_connections_async()) + asyncio.run(get_data_source_connection_async()) + asyncio.run(delete_data_source_connection_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_indexers_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_indexers_operations_async.py deleted file mode 100644 index e83e1bbc66c3..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_indexers_operations_async.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_indexer_operations_async.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Indexer. 
-USAGE: - python sample_indexer_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import asyncio -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] -connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.models import ( - SearchIndexerDataContainer, - SearchIndexerDataSourceConnection, - SearchIndex, - SearchIndexer, - SimpleField, - SearchFieldDataType, -) -from azure.search.documents.indexes.aio import SearchIndexerClient, SearchIndexClient - -indexers_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - - -async def create_indexer(): - # create an index - index_name = "async-indexer-hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - ] - index = SearchIndex(name=index_name, fields=fields) - ind_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - async with ind_client: - await ind_client.create_index(index) - - # [START create_indexer_async] - # create a datasource - container = SearchIndexerDataContainer(name="searchcontainer") - data_source_connection = SearchIndexerDataSourceConnection( - name="async-indexer-datasource", type="azureblob", connection_string=connection_string, container=container - ) - data_source = await indexers_client.create_data_source_connection(data_source_connection) - - # create an indexer - indexer = SearchIndexer( - name="async-sample-indexer", - data_source_name="async-indexer-datasource", - target_index_name="async-indexer-hotels", - ) - result = await indexers_client.create_indexer(indexer) - print("Create new Indexer - 
async-sample-indexer") - # [END create_indexer_async] - - -async def list_indexers(): - # [START list_indexer_async] - result = await indexers_client.get_indexers() - names = [x.name for x in result] - print("Found {} Indexers in the service: {}".format(len(result), ", ".join(names))) - # [END list_indexer_async] - - -async def get_indexer(): - # [START get_indexer_async] - result = await indexers_client.get_indexer("async-sample-indexer") - print("Retrived Indexer 'async-sample-indexer'") - return result - # [END get_indexer_async] - - -async def get_indexer_status(): - # [START get_indexer_status_async] - result = await indexers_client.get_indexer_status("async-sample-indexer") - print("Retrived Indexer status for 'async-sample-indexer'") - return result - # [END get_indexer_status_async] - - -async def run_indexer(): - # [START run_indexer_async] - await indexers_client.run_indexer("async-sample-indexer") - print("Ran the Indexer 'async-sample-indexer'") - return - # [END run_indexer_async] - - -async def reset_indexer(): - # [START reset_indexer_async] - await indexers_client.reset_indexer("async-sample-indexer") - print("Reset the Indexer 'async-sample-indexer'") - return - # [END reset_indexer_async] - - -async def delete_indexer(): - # [START delete_indexer_async] - await indexers_client.delete_indexer("async-sample-indexer") - print("Indexer 'async-sample-indexer' successfully deleted") - # [END delete_indexer_async] - - -async def main(): - await create_indexer() - await list_indexers() - await get_indexer() - await get_indexer_status() - await run_indexer() - await reset_indexer() - await delete_indexer() - await indexers_client.close() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_autocomplete_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_autocomplete_async.py new file mode 100644 index 000000000000..70b3eb5d5121 --- /dev/null +++ 
b/sdk/search/azure-search-documents/samples/async_samples/sample_query_autocomplete_async.py @@ -0,0 +1,50 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to retrieve autocomplete suggestions. + +USAGE: + python sample_query_autocomplete_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def autocomplete_query_async(): + # [START autocomplete_query_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + results = await search_client.autocomplete( + search_text="bo", suggester_name="sg" + ) + + print("Results: autocomplete for 'bo'") + for result in results: + print(f" Completion: {result['text']}") + # [END autocomplete_query_async] + + +if __name__ == "__main__": + asyncio.run(autocomplete_query_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_facets_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_facets_async.py new file mode 100644 index 000000000000..5dacbf5b2a8b --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_facets_async.py @@ -0,0 
+1,53 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to retrieve facets in search results. + +USAGE: + python sample_query_facets_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def facet_query_async(): + # [START facet_query_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + results = await search_client.search( + search_text="WiFi", facets=["Category,count:3", "ParkingIncluded"] + ) + + facets = await results.get_facets() + + print("Results: category facets") + if facets: + for facet in facets["Category"]: + print(f" {facet}") + # [END facet_query_async] + + +if __name__ == "__main__": + asyncio.run(facet_query_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_filter_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_filter_async.py new file mode 100644 index 000000000000..4083b2446be1 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_filter_async.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to filter and sort search results. + +USAGE: + python sample_query_filter_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os +import asyncio + + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def filter_query_async(): + # [START filter_query_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + results = await search_client.search( + search_text="WiFi", + filter="Address/StateProvince eq 'FL' and Address/Country eq 'USA'", + select=["HotelName", "Rating"], + order_by=["Rating desc"], + ) + + print("Results: Florida hotels with WiFi (sorted by rating)") + async for result in results: + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") + # [END filter_query_async] + + +if __name__ == "__main__": + asyncio.run(filter_query_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_semantic_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_semantic_async.py new file mode 100644 index 000000000000..e8ac4c536b04 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_semantic_async.py 
@@ -0,0 +1,120 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to perform semantic search. +USAGE: + python sample_query_semantic_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os +import asyncio + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] +semantic_configuration_name = "hotels-sample-semantic-config" + + +async def create_semantic_configuration_async(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + from azure.search.documents.indexes.models import ( + SemanticConfiguration, + SemanticPrioritizedFields, + SemanticField, + SemanticSearch, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + index = await index_client.get_index(index_name) + + semantic_config = SemanticConfiguration( + name=semantic_configuration_name, + prioritized_fields=SemanticPrioritizedFields( + title_field=SemanticField(field_name="HotelName"), + content_fields=[SemanticField(field_name="Description")], + keywords_fields=[SemanticField(field_name="Tags")], + ), + ) + + index.semantic_search = SemanticSearch(configurations=[semantic_config]) + await index_client.create_or_update_index(index) + print( + f"Updated: index '{index_name}' (semantic config '{semantic_configuration_name}')" + ) + + +async def speller_async(): + # 
[START speller_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + credential = AzureKeyCredential(key) + search_client = SearchClient( + endpoint=service_endpoint, index_name=index_name, credential=credential + ) + async with search_client: + results = await search_client.search( + search_text="luxury", query_language="en-us", query_speller="lexicon" + ) + + print("Results: speller") + async for result in results: + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + # [END speller_async] + + +async def semantic_ranking_async(): + # [START semantic_ranking_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + credential = AzureKeyCredential(key) + search_client = SearchClient( + endpoint=service_endpoint, index_name=index_name, credential=credential + ) + async with search_client: + results = await search_client.search( + search_text="luxury", + query_type="semantic", + semantic_configuration_name=semantic_configuration_name, + query_language="en-us", + ) + + print("Results: semantic ranking") + async for result in results: + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + # [END semantic_ranking_async] + + +async def delete_semantic_configuration_async(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes.aio import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + index = await index_client.get_index(index_name) + index.semantic_search = None + await index_client.create_or_update_index(index) + print(f"Deleted: semantic config from index '{index_name}'") + + +if __name__ == "__main__": + asyncio.run(create_semantic_configuration_async()) + asyncio.run(speller_async()) + asyncio.run(semantic_ranking_async()) + 
asyncio.run(delete_semantic_configuration_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_session_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_session_async.py index bfd02c0b6e27..3dc5c1e861e9 100644 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_query_session_async.py +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_session_async.py @@ -1,24 +1,20 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_query_session_async.py DESCRIPTION: - To ensure more consistent and unique search results within a user's session, you can use session id. - Simply include the session_id parameter in your queries to create a unique identifier for each user session. - This ensures a uniform experience for users throughout their "query session". + Demonstrates how to use session IDs for consistent scoring. USAGE: python sample_query_session_async.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service """ import os @@ -29,7 +25,7 @@ key = os.environ["AZURE_SEARCH_API_KEY"] -async def query_session(): +async def query_session_async(): # [START query_session_async] from azure.core.credentials import AzureKeyCredential from azure.search.documents.aio import SearchClient @@ -39,11 +35,11 @@ async def query_session(): async with search_client: results = await search_client.search(search_text="spa", session_id="session-1") - print("Hotels containing 'spa' in the name (or other fields):") + print("Results: hotels with 'spa'") async for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") # [END query_session_async] if __name__ == "__main__": - asyncio.run(query_session()) + asyncio.run(query_session_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_simple_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_simple_async.py new file mode 100644 index 000000000000..8bc4efba8373 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_simple_async.py @@ -0,0 +1,46 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to perform a simple text search. 
+USAGE: + python sample_query_simple_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os +import asyncio + + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def simple_query_async(): + # [START simple_query_async] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.aio import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + async with search_client: + results = await search_client.search(search_text="spa") + + print("Results: hotels with 'spa'") + async for result in results: + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") + # [END simple_query_async] + + +if __name__ == "__main__": + asyncio.run(simple_query_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_suggestions_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_suggestions_async.py similarity index 51% rename from sdk/search/azure-search-documents/samples/async_samples/sample_suggestions_async.py rename to sdk/search/azure-search-documents/samples/async_samples/sample_query_suggestions_async.py index 8f3fef5a90af..cc70c799b77c 100644 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_suggestions_async.py +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_suggestions_async.py @@ -1,23 +1,20 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_suggestions_async.py DESCRIPTION: - This sample demonstrates how to obtain search suggestions from an Azure - search index. + Demonstrates how to retrieve search suggestions. USAGE: - python sample_suggestions_async.py + python sample_query_suggestions_async.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service """ import os @@ -29,7 +26,7 @@ key = os.environ["AZURE_SEARCH_API_KEY"] -async def suggest_query(): +async def suggest_query_async(): # [START suggest_query_async] from azure.core.credentials import AzureKeyCredential from azure.search.documents.aio import SearchClient @@ -39,12 +36,12 @@ async def suggest_query(): async with search_client: results = await search_client.suggest(search_text="coffee", suggester_name="sg") - print("Search suggestions for 'coffee'") + print("Results: suggestions for 'coffee'") for result in results: - hotel = await search_client.get_document(key=result["hotelId"]) - print(" Text: {} for Hotel: {}".format(repr(result["text"]), hotel["hotelName"])) + hotel = await search_client.get_document(key=result["HotelId"]) + print(f" Text: {result['text']!r}, HotelName: {hotel['HotelName']}") # [END suggest_query_async] if __name__ == "__main__": - 
asyncio.run(suggest_query()) + asyncio.run(suggest_query_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_query_vector_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_query_vector_async.py new file mode 100644 index 000000000000..0f742646e0f7 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_query_vector_async.py @@ -0,0 +1,272 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create a vector-enabled index, upload documents with + pre-computed DescriptionVector values, and run vector queries. + +USAGE: + python sample_query_vector_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_API_KEY - the primary admin key for your search service + +NOTE: + This sample uses a pre-computed vector for the query "quintessential lodging + near running trails, eateries, retail" instead of calling an embedding API. + The vector was generated using text-embedding-ada-002 (1536 dimensions). 
+""" + +import asyncio +import json +import os +from pathlib import Path + +from azure.core.credentials import AzureKeyCredential +from azure.search.documents.aio import SearchClient +from azure.search.documents.models import VectorizedQuery +from azure.search.documents.indexes.aio import SearchIndexClient +from azure.search.documents.indexes.models import ( + SimpleField, + SearchField, + SearchFieldDataType, + SearchableField, + SearchIndex, + SemanticConfiguration, + SemanticField, + SemanticPrioritizedFields, + SemanticSearch, + VectorSearch, + VectorSearchProfile, + HnswAlgorithmConfiguration, + ExhaustiveKnnAlgorithmConfiguration, +) + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] + +index_name = "hotels-sample-index-query-vector" + +data_dir = Path(__file__).resolve().parents[1] / "data" +documents_path = data_dir / "hotels_with_description_vector.json" +query_vector_path = data_dir / "query_vector.json" + + +def load_query_vector(query_vector_path): + """Load the query vector from the samples/data folder.""" + with open(query_vector_path, "r", encoding="utf-8") as handle: + return json.load(handle) + + +vector = load_query_vector(query_vector_path) + + +async def create_index_async(): + """Create or update the vector-enabled search index.""" + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + fields = [ + SimpleField( + name="HotelId", type=SearchFieldDataType.String, key=True, filterable=True + ), + SearchableField( + name="HotelName", type=SearchFieldDataType.String, sortable=True + ), + SearchableField(name="Description", type=SearchFieldDataType.String), + SearchField( + name="DescriptionVector", + type=SearchFieldDataType.Collection(SearchFieldDataType.Single), + searchable=True, + vector_search_dimensions=1536, + vector_search_profile_name="my-vector-profile", + ), + SearchableField( + name="Category", + type=SearchFieldDataType.String, + sortable=True, + 
filterable=True, + facetable=True, + ), + SearchField( + name="Tags", + type=SearchFieldDataType.Collection(SearchFieldDataType.String), + searchable=True, + filterable=True, + facetable=True, + ), + ] + + vector_search = VectorSearch( + algorithms=[ + HnswAlgorithmConfiguration(name="my-hnsw-vector-config-1", kind="hnsw"), + ExhaustiveKnnAlgorithmConfiguration( + name="my-eknn-vector-config", kind="exhaustiveKnn" + ), + ], + profiles=[ + VectorSearchProfile( + name="my-vector-profile", + algorithm_configuration_name="my-hnsw-vector-config-1", + ) + ], + ) + + semantic_config = SemanticConfiguration( + name="my-semantic-config", + prioritized_fields=SemanticPrioritizedFields( + title_field=SemanticField(field_name="HotelName"), + content_fields=[SemanticField(field_name="Description")], + keywords_fields=[SemanticField(field_name="Category")], + ), + ) + + semantic_search = SemanticSearch(configurations=[semantic_config]) + + index = SearchIndex( + name=index_name, + fields=fields, + vector_search=vector_search, + semantic_search=semantic_search, + ) + + async with index_client: + result = await index_client.create_or_update_index(index) + print(f"Created: index '{result.name}'") + + +def load_documents(): + with open(documents_path, "r", encoding="utf-8") as handle: + raw = handle.read().strip() + + payload = json.loads(raw) + documents = payload["value"] + + return documents + + +async def upload_documents_async(): + """Upload documents to the search index.""" + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + documents = load_documents() + async with search_client: + result = await search_client.upload_documents(documents=documents) + print(f"Uploaded: {len(result)} documents to index '{index_name}'") + + +async def single_vector_search_async(): + """Perform a single vector search using a pre-computed query vector.""" + # [START single_vector_search_async] + search_client = SearchClient(service_endpoint, index_name, 
AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + async with search_client: + results = await search_client.search( + vector_queries=[vector_query], + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: single vector search") + async for result in results: + print( + f" HotelId: {result['HotelId']}, HotelName: {result['HotelName']}, " + f"Category: {result.get('Category')}" + ) + # [END single_vector_search_async] + + +async def single_vector_search_with_filter_async(): + """Perform a vector search with a filter applied.""" + # [START single_vector_search_with_filter_async] + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + async with search_client: + results = await search_client.search( + vector_queries=[vector_query], + filter="Tags/any(tag: tag eq 'free wifi')", + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: vector search with filter") + async for result in results: + print( + f" HotelId: {result['HotelId']}, HotelName: {result['HotelName']}, " + f"Tags: {result.get('Tags')}" + ) + # [END single_vector_search_with_filter_async] + + +async def simple_hybrid_search_async(): + """Perform a hybrid search combining vector and text search.""" + # [START simple_hybrid_search_async] + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + async with search_client: + results = await search_client.search( + search_text="historic hotel walk to restaurants and shopping", + vector_queries=[vector_query], + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: hybrid search") + async for result in 
results: + score = result.get("@search.score", "N/A") + print(f" Score: {score}") + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + print(f" Description: {result.get('Description')}") + print(f" Category: {result.get('Category')}") + print(f" Tags: {result.get('Tags', 'N/A')}") + print() + # [END simple_hybrid_search_async] + + +async def delete_index_async(): + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + async with index_client: + await index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + + +if __name__ == "__main__": + print("Query: 'quintessential lodging near running trails, eateries, retail'") + + async def run(): + try: + await create_index_async() + await upload_documents_async() + await single_vector_search_async() + print() + await single_vector_search_with_filter_async() + print() + await simple_hybrid_search_async() + print() + finally: + await delete_index_async() + + asyncio.run(run()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_custom_request_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_custom_request_async.py new file mode 100644 index 000000000000..24fe1a28be00 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_custom_request_async.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to make custom HTTP requests using SearchClient. 
+ +USAGE: + python sample_search_client_custom_request_async.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import asyncio +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +async def sample_send_request_async(): + from azure.core.credentials import AzureKeyCredential + from azure.core.rest import HttpRequest + from azure.search.documents.aio import SearchClient + import sys + from pathlib import Path + + sys.path.append(str(Path(__file__).resolve().parents[1])) + from sample_utils import AZURE_SEARCH_API_VERSION + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, + # while adding convenience for endpoint construction. 
+ request = HttpRequest( + method="GET", url=f"/docs/$count?api-version={AZURE_SEARCH_API_VERSION}" + ) + async with search_client: + response = await search_client.send_request(request) + response.raise_for_status() + response_body = response.json() + print(f"Document count: {response_body} (index '{index_name}')") + + +if __name__ == "__main__": + asyncio.run(sample_send_request_async()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_send_request_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_send_request_async.py deleted file mode 100644 index 3a9c0c13d185..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_search_client_send_request_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_search_client_send_request_async.py - -DESCRIPTION: - This sample demonstrates how to make custom HTTP requests through a client pipeline. - -USAGE: - python sample_search_client_send_request_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import asyncio -import os -from azure.core.credentials import AzureKeyCredential -from azure.core.rest import HttpRequest -from azure.search.documents.aio import SearchClient - - -async def sample_send_request(): - endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - client = SearchClient(endpoint, index_name, AzureKeyCredential(key)) - - # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, - # while adding convenience for endpoint construction. - request = HttpRequest(method="GET", url=f"/docs/$count?api-version=2024-05-01-preview") - async with client: - response = await client.send_request(request) - response.raise_for_status() - response_body = response.json() - print(response_body) - - -if __name__ == "__main__": - asyncio.run(sample_send_request()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_semantic_search_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_semantic_search_async.py deleted file mode 100644 index 42a0cb2874ee..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_semantic_search_async.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_semantic_search_async.py -DESCRIPTION: - This sample demonstrates how to use semantic search. 
-USAGE: - python sample_semantic_search_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - - -async def speller(): - # [START speller_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - credential = AzureKeyCredential(key) - client = SearchClient(endpoint=service_endpoint, index_name=index_name, credential=credential) - results = await client.search(search_text="luxury", query_language="en-us", query_speller="lexicon") - - async for result in results: - print("{}\n{}\n)".format(result["hotelId"], result["hotelName"])) - # [END speller_async] - - -async def semantic_ranking(): - # [START semantic_ranking_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - credential = AzureKeyCredential(key) - client = SearchClient(endpoint=service_endpoint, index_name=index_name, credential=credential) - results = list( - client.search( - search_text="luxury", - query_type="semantic", - semantic_configuration_name="semantic_config_name", - query_language="en-us", - ) - ) - - for result in results: - print("{}\n{}\n)".format(result["hotelId"], result["hotelName"])) - # [END semantic_ranking_async] - - -if __name__ == "__main__": - asyncio.run(speller()) - asyncio.run(semantic_ranking()) diff --git 
a/sdk/search/azure-search-documents/samples/async_samples/sample_simple_query_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_simple_query_async.py deleted file mode 100644 index d5f62acb4ff6..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_simple_query_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_simple_query_async.py -DESCRIPTION: - This sample demonstrates how to get search results from a basic search text - from an Azure Search index. -USAGE: - python sample_simple_query_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -async def simple_text_query(): - # [START simple_query_async] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.aio import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - async with search_client: - results = await search_client.search(search_text="spa") - - print("Hotels containing 'spa' in the name (or other fields):") - async for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) - # [END simple_query_async] - - -if __name__ == "__main__": - asyncio.run(simple_text_query()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py deleted file mode 100644 index d45d375563d9..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_synonym_map_operations_async.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Synonym Map. 
-USAGE: - python sample_synonym_map_operations_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import asyncio -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.aio import SearchIndexClient -from azure.search.documents.indexes.models import SynonymMap - -client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - -async def create_synonym_map(): - # [START create_synonym_map_async] - synonyms = [ - "USA, United States, United States of America", - "Washington, Wash. => WA", - ] - synonym_map = SynonymMap(name="test-syn-map", synonyms=synonyms) - result = await client.create_synonym_map(synonym_map) - print("Create new Synonym Map 'test-syn-map succeeded") - # [END create_synonym_map_async] - - -async def get_synonym_maps(): - # [START get_synonym_maps_async] - result = await client.get_synonym_maps() - names = [x.name for x in result] - print("Found {} Synonym Maps in the service: {}".format(len(result), ", ".join(names))) - # [END get_synonym_maps_async] - - -async def get_synonym_map(): - # [START get_synonym_map_async] - result = await client.get_synonym_map("test-syn-map") - print("Retrived Synonym Map 'test-syn-map' with synonyms") - if result: - for syn in result.synonyms: - print(" {}".format(syn)) - # [END get_synonym_map_async] - - -async def delete_synonym_map(): - # [START delete_synonym_map_async] - await client.delete_synonym_map("test-syn-map") - print("Synonym Map 'test-syn-map' deleted") - # [END delete_synonym_map_async] - - -async def main(): - await create_synonym_map() - await get_synonym_maps() - await get_synonym_map() - await delete_synonym_map() - await client.close() - - -if __name__ 
== "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_vector_search_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_vector_search_async.py deleted file mode 100644 index 269226f313d6..000000000000 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_vector_search_async.py +++ /dev/null @@ -1,203 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_vector_search_async.py -DESCRIPTION: - This sample demonstrates how to get search results from a basic search text - from an Azure Search index. -USAGE: - python sample_vector_search_async.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -import asyncio - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.aio import SearchClient -from azure.search.documents.indexes import SearchIndexClient -from azure.search.documents.models import VectorizedQuery - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def get_embeddings(text: str): - # There are a few ways to get embeddings. This is just one example. 
- import openai - - open_ai_endpoint = os.getenv("OpenAIEndpoint") - open_ai_key = os.getenv("OpenAIKey") - - client = openai.AzureOpenAI( - azure_endpoint=open_ai_endpoint, - api_key=open_ai_key, - api_version="2023-09-01-preview", - ) - embedding = client.embeddings.create(input=[text], model="text-embedding-ada-002") - return embedding.data[0].embedding - - -def get_hotel_index(name: str): - from azure.search.documents.indexes.models import ( - SearchIndex, - SearchField, - SearchFieldDataType, - SimpleField, - SearchableField, - VectorSearch, - VectorSearchProfile, - HnswAlgorithmConfiguration, - ) - - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SearchableField(name="hotelName", type=SearchFieldDataType.String, sortable=True, filterable=True), - SearchableField(name="description", type=SearchFieldDataType.String), - SearchField( - name="descriptionVector", - type=SearchFieldDataType.Collection(SearchFieldDataType.Single), - searchable=True, - vector_search_dimensions=1536, - vector_search_profile_name="my-vector-config", - ), - SearchableField( - name="category", type=SearchFieldDataType.String, sortable=True, filterable=True, facetable=True - ), - ] - vector_search = VectorSearch( - profiles=[VectorSearchProfile(name="my-vector-config", algorithm_configuration_name="my-algorithms-config")], - algorithms=[HnswAlgorithmConfiguration(name="my-algorithms-config")], - ) - return SearchIndex(name=name, fields=fields, vector_search=vector_search) - - -def get_hotel_documents(): - docs = [ - { - "hotelId": "1", - "hotelName": "Fancy Stay", - "description": "Best hotel in town if you like luxury hotels.", - "descriptionVector": get_embeddings("Best hotel in town if you like luxury hotels."), - "category": "Luxury", - }, - { - "hotelId": "2", - "hotelName": "Roach Motel", - "description": "Cheapest hotel in town. Infact, a motel.", - "descriptionVector": get_embeddings("Cheapest hotel in town. 
Infact, a motel."), - "category": "Budget", - }, - { - "hotelId": "3", - "hotelName": "EconoStay", - "description": "Very popular hotel in town.", - "descriptionVector": get_embeddings("Very popular hotel in town."), - "category": "Budget", - }, - { - "hotelId": "4", - "hotelName": "Modern Stay", - "description": "Modern architecture, very polite staff and very clean. Also very affordable.", - "descriptionVector": get_embeddings( - "Modern architecture, very polite staff and very clean. Also very affordable." - ), - "category": "Luxury", - }, - { - "hotelId": "5", - "hotelName": "Secret Point", - "description": "One of the best hotel in town. The hotel is ideally located on the main commercial artery of the city in the heart of New York.", - "descriptionVector": get_embeddings( - "One of the best hotel in town. The hotel is ideally located on the main commercial artery of the city in the heart of New York." - ), - "category": "Boutique", - }, - ] - return docs - - -async def single_vector_search(): - # [START single_vector_search] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - async with search_client: - results = await search_client.search( - search_text="", - vector_queries=[vector_query], - select=["hotelId", "hotelName"], - ) - - async for result in results: - print(result) - # [END single_vector_search] - - -async def single_vector_search_with_filter(): - # [START single_vector_search_with_filter] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - async with search_client: - results = await search_client.search( - vector_queries=[vector_query], - filter="category eq 'Luxury'", - 
select=["hotelId", "hotelName"], - ) - - async for result in results: - print(result) - # [END single_vector_search_with_filter] - - -async def simple_hybrid_search(): - # [START simple_hybrid_search] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - async with search_client: - results = await search_client.search( - search_text=query, - vector_queries=[vector_query], - select=["hotelId", "hotelName"], - ) - - async for result in results: - print(result) - # [END simple_hybrid_search] - - -async def main(): - credential = AzureKeyCredential(key) - index_client = SearchIndexClient(service_endpoint, credential) - index = get_hotel_index(index_name) - index_client.create_index(index) - client = SearchClient(service_endpoint, index_name, credential) - hotel_docs = get_hotel_documents() - await client.upload_documents(documents=hotel_docs) - - await single_vector_search() - await single_vector_search_with_filter() - await simple_hybrid_search() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/search/azure-search-documents/samples/data/hotels_with_description_vector.json b/sdk/search/azure-search-documents/samples/data/hotels_with_description_vector.json new file mode 100644 index 000000000000..c2498e5494d4 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/data/hotels_with_description_vector.json @@ -0,0 +1,10855 @@ +{ + "value": [ + { + "@search.action": "mergeOrUpload", + "HotelId": "1", + "HotelName": "Stay-Kay City Hotel", + "Description": "This classic hotel is fully-refurbished and ideally located on the main commercial artery of the city in the heart of New York. 
A few minutes away is Times Square and the historic centre of the city, as well as other places of interest that make New York one of America's most attractive and cosmopolitan cities.", + "DescriptionVector": [ + -0.048865054, + -0.020307425, + 0.017633565, + 0.023875887, + -0.04401433, + -0.021689085, + -0.04437217, + 0.011500583, + 0.03840817, + 0.00029058976, + 0.016907945, + -0.009214383, + -0.04512761, + 0.019889945, + 0.020973407, + 0.023040926, + -0.026539808, + 0.050495215, + 0.07152826, + -0.008786962, + -0.009994673, + -0.0053129313, + -0.014601864, + -0.048069853, + 0.021231845, + 0.022066806, + -0.018021226, + -0.010526463, + 0.07220418, + 0.0068685417, + 0.009472823, + -0.023239726, + 0.040276892, + 0.03399481, + 0.0156058045, + -0.001837658, + -0.009567252, + -0.03630089, + 0.009010613, + 0.027672967, + -0.023398766, + 0.030078448, + 0.018428765, + -0.006709502, + -0.03598281, + -0.018021226, + -0.017782666, + 0.06655826, + -0.019909825, + 0.010963823, + -0.028428407, + 0.007325782, + -0.030833889, + -0.045724012, + -0.0780489, + 0.024253607, + 0.018220024, + -0.022762606, + 0.056777295, + 0.007817812, + 0.03355745, + 0.029163968, + 0.031967048, + 0.029959168, + -0.051568735, + 0.057294175, + -0.0156157445, + 0.03759309, + -0.046002332, + -0.020396886, + 0.053278416, + 0.016371185, + 0.03170861, + -0.015685324, + 0.0010555041, + 0.024094567, + 0.0051886817, + 0.012872304, + 0.004055521, + -0.03315985, + -0.013568103, + -0.023359006, + -0.072243944, + 0.026480168, + 0.025068687, + 0.009010613, + -0.018090805, + -0.025207847, + 0.009408212, + 0.0025123358, + 0.024591567, + -0.003725016, + -0.0053924513, + -0.025227727, + -0.055385694, + 0.012136743, + -0.011709323, + -0.041310653, + -0.021828245, + 0.04373601, + 0.030217608, + 0.023199966, + -0.012912064, + 0.020277606, + 0.021609565, + -0.031887528, + 0.014164504, + -0.062264178, + 0.03315985, + 0.0034218458, + -0.07550426, + 0.007653802, + -0.04544569, + -0.030973049, + -0.0029298158, + 0.041708253, 
+ 0.053198896, + -0.03379601, + -0.010834603, + 0.025168087, + -0.031569447, + -0.023836127, + -0.025088567, + -0.009935033, + 0.0017009829, + -0.03395505, + 0.03174837, + -0.030814009, + -0.0155958645, + -0.0030192758, + 0.009477792, + -0.024830127, + -0.046757773, + 0.0055216714, + -0.015069044, + 0.024015047, + 0.015735025, + -0.020655327, + -0.020357126, + 0.015287724, + 0.003705136, + -0.03389541, + -0.026142208, + -0.041390173, + -0.03705633, + 0.06818842, + 0.03186765, + 0.007181652, + -0.012802724, + 0.030694729, + 0.025366887, + 0.064729296, + 0.029680848, + -0.011639743, + -0.0016351305, + 0.0029944258, + 0.021788485, + -0.017921826, + -0.03486953, + 0.040992573, + -0.021629445, + 0.03576413, + -0.07232346, + 0.004868116, + 0.055783294, + 0.031112209, + -0.046121612, + -0.049262654, + -0.04500833, + -0.023021046, + 0.03538641, + -0.020536046, + 0.006500762, + 0.031808008, + 0.03359721, + 0.052920576, + -0.017812485, + -0.014949764, + 0.028845888, + 0.019780606, + 0.019999286, + -0.020874007, + 0.0865973, + -0.057691775, + 0.019442646, + 0.03190741, + -0.079122424, + 0.046519212, + 0.018170325, + 0.012196383, + 0.013448824, + 0.009865453, + -0.0850069, + 0.0057204715, + -0.03270261, + 0.051727775, + -0.03242429, + -0.041151613, + 0.012902124, + 0.0003308157, + -0.011937943, + 0.0045102765, + 0.018617624, + -0.016401004, + -0.018369125, + 0.009716352, + 0.0052185017, + -0.024850007, + 0.019880006, + 0.03294117, + -0.004353721, + -0.04373601, + 0.019134505, + 0.0693017, + -0.016222084, + -0.03570449, + -0.050018094, + 0.003702651, + -0.028448287, + 0.047791533, + 0.00023576444, + 0.0012723204, + 0.0047712014, + 0.028030807, + -0.026162088, + 0.06846674, + -0.0069281817, + -0.025963288, + -0.004067946, + 0.011848483, + 0.0010604741, + -0.013090984, + -0.024174087, + -0.029541688, + -0.014224144, + 0.04238417, + 0.007236322, + 0.0034392409, + -0.03447193, + -0.013001524, + -0.03357733, + 0.007017642, + -0.008697502, + 0.011450883, + 0.030058568, + 
-0.019154385, + -0.014104864, + -0.022822246, + -0.011013523, + 0.024631327, + -0.0059391516, + 0.03238453, + 0.03644005, + -0.028925408, + 0.020774607, + -0.0029447258, + 0.0016053105, + 0.015426884, + 0.041946813, + 0.025426527, + 0.019094745, + -0.000408472, + 0.056061614, + -0.024492167, + -0.012385244, + -0.046996333, + -0.054868814, + 0.030694729, + 0.00025517851, + -0.059918337, + -0.045843292, + 0.0029571508, + -0.0068486617, + -0.03745393, + 0.03638041, + -0.031092329, + 0.0055167014, + 3.5877198e-05, + -0.042145614, + -0.0138861835, + -0.022086686, + -0.03785153, + 0.07232346, + -0.013031344, + -0.018657384, + -0.006461002, + -0.013826543, + 0.029422408, + -0.023716846, + 0.007141892, + -0.0025309732, + 0.0026788306, + 0.011659623, + -0.03838829, + -0.00011531956, + -0.007922182, + 0.022881886, + -0.06938122, + 0.002265078, + -0.0021681632, + -0.023736726, + 0.0750669, + 0.03610209, + -0.014820544, + -0.018041106, + 0.061429217, + 0.003287656, + -0.029800128, + 0.020436646, + 0.022941526, + -0.0022812306, + 0.020237846, + -0.019184206, + -0.0716873, + -0.022066806, + -0.039879292, + 0.014701264, + -0.0058447216, + -0.032245368, + 0.0060137017, + 0.010049343, + -0.021470405, + -0.0050147315, + 0.007718412, + 0.057413455, + -0.023657206, + 0.011798783, + 0.025943408, + -0.009199472, + -0.0021818306, + 0.040952813, + -0.032682728, + 0.018190205, + -0.0026639206, + 0.022444526, + 0.016629625, + -0.015466644, + -0.014800664, + 0.024512047, + 0.0016475555, + 0.014512404, + -0.058327936, + -0.012653624, + -0.010049343, + 0.064331695, + -0.025983168, + -0.010337603, + -0.017971525, + -0.013677443, + -0.010993643, + -0.056817055, + -0.027593447, + -0.009542403, + 0.010009583, + 0.014422944, + 0.014850364, + 0.007609072, + 0.054550733, + -0.011073163, + 0.039839532, + -0.024452407, + -0.024929527, + 0.017822426, + -0.007151832, + 0.014760904, + 0.007256202, + -0.045724012, + 0.009646772, + -0.027692847, + 0.017395005, + -0.007678652, + 0.0056459215, + 0.013220204, 
+ 0.009607012, + -0.064013615, + 0.017116684, + -0.001591643, + 0.008886362, + -0.04234441, + -0.041310653, + -0.0020849155, + -0.04294081, + 0.013478644, + -0.028388647, + -0.010526463, + 0.022265606, + -0.004798536, + -0.014870244, + -0.027573567, + 0.057015855, + 0.04492881, + -0.011560223, + 0.0049749715, + -0.008364513, + -0.011540343, + 0.010228263, + 0.015029284, + 0.052960336, + -0.021331245, + -0.0029397558, + 0.058407456, + -0.04341793, + -0.011083103, + -0.022524046, + 0.03178813, + -0.014661504, + -0.03381589, + -0.055226654, + -0.0069679418, + 0.0030714609, + -0.03801057, + -0.023796367, + 0.008453973, + -0.019015225, + 0.024273487, + 0.007400332, + 0.04262273, + -0.026818128, + -0.0122659635, + 0.004773686, + 0.047155373, + 0.03572437, + 0.03767261, + 0.03482977, + 0.012941884, + 0.018597744, + 0.0012580316, + -0.012216263, + -0.07781034, + -0.03226525, + 0.015019344, + -0.0059987917, + 0.023120446, + 0.057135135, + -0.019989345, + 0.060196657, + -0.020416766, + 0.012434944, + -0.024392767, + -0.021072807, + -0.057612255, + -0.016659444, + -0.04242393, + -0.03741417, + 0.023040926, + 0.051250655, + 0.012434944, + 0.062184658, + 0.011480703, + 0.0044829412, + 0.0021992256, + -0.017723026, + -0.020327305, + 0.021132445, + -0.03870637, + 0.07005714, + 0.019383006, + -0.016540164, + 0.03753345, + -0.03630089, + 0.018786605, + -0.010755083, + -0.022186086, + -0.0048109614, + 0.052284416, + 0.023418646, + -0.011997583, + 0.017563986, + 0.024671087, + -0.013130744, + -0.04520713, + -0.030376649, + 0.04298057, + 0.04266249, + 0.018667325, + -0.03809009, + -0.029621208, + 0.053397696, + 0.041787773, + -0.018776665, + 0.015347364, + -0.004224501, + -0.03628101, + -0.030654969, + -0.024333127, + -0.004423301, + -0.050813295, + 0.021689085, + -0.002314778, + 0.016748905, + 0.008260142, + 0.020476406, + 0.052801296, + -0.004714046, + 0.015814545, + -0.0106358025, + -0.049103614, + -0.027513927, + -0.030913409, + -0.016301604, + -0.008329722, + 0.040833533, + 
0.03230501, + -0.050614495, + 0.014492524, + 0.03178813, + -0.011361423, + -0.024472287, + 0.017375125, + -0.018895945, + -0.032245368, + 0.017544106, + -0.017315485, + 0.010437003, + -0.024333127, + 0.020317366, + 0.010685503, + 0.012484644, + 0.011629803, + -0.015874185, + 0.009602043, + 0.001994213, + -0.023895767, + 0.030575449, + 0.036121972, + 0.039680492, + -0.03906421, + -0.028726608, + 0.026818128, + 0.027275367, + -0.018548045, + -0.03502857, + -0.014949764, + 0.03822925, + 0.010347543, + 0.0016599805, + -0.009626892, + -0.021708965, + 0.011669563, + 0.0053328113, + 0.060554497, + 0.003690226, + -0.0015096379, + -0.030257368, + -0.026261488, + -0.0060137017, + -0.03385565, + -0.010054313, + 0.024909647, + -0.024154207, + 0.011232203, + 0.0012617591, + 0.008881393, + 0.03846781, + 0.014939824, + 0.009318752, + 0.048030093, + -0.016351305, + -0.014234084, + 0.019621566, + -0.055504974, + 0.023677086, + -0.021013167, + 0.001827718, + 0.03508821, + 0.003188256, + 0.025585568, + 0.04262273, + 0.0028428407, + -0.026639208, + 0.023319246, + -0.014353364, + -0.014661504, + -0.003752351, + 0.0155958645, + 0.010526463, + 0.012096983, + 0.010745143, + 0.013498524, + 0.049739774, + 0.03357733, + -0.016162444, + 0.023279486, + 0.021152325, + -0.04298057, + 0.013975644, + 0.018329365, + 0.025187967, + -0.017563986, + 0.03592317, + 0.006396392, + -0.007166742, + 0.04512761, + 0.0057055615, + -0.010675563, + 0.006267172, + -0.04302033, + -0.0021656782, + -0.03371649, + 0.026838008, + -0.028567567, + 0.0015270329, + 0.003747381, + -0.011102983, + 0.004674286, + 0.025963288, + -0.008419182, + 0.023100566, + 0.010397243, + -0.025923528, + 0.012723204, + 0.0028030807, + 0.022782486, + 0.011699383, + -0.029979048, + 0.03618161, + -0.0041524363, + -0.007822782, + 0.019363126, + -0.026678968, + -0.048825294, + 0.041787773, + 0.03250381, + -0.0009753628, + 0.0054620313, + 0.017086865, + -0.011679503, + -0.022245726, + -0.010153713, + -0.029641088, + 0.013289784, + 0.023696966, + 
0.048109613, + -0.003876601, + -0.0017991405, + 0.020714967, + 0.03528701, + -0.027494047, + -0.017822426, + -0.000420897, + -0.018418824, + 0.024531927, + 0.040634733, + 0.028150087, + -0.010755083, + -0.008439062, + 0.04461073, + 0.007499732, + -0.025863888, + 0.030555569, + -0.028746488, + 0.0027633207, + -0.0028080507, + -0.008106072, + -0.0053924513, + 0.0058099316, + 0.031450167, + 0.023498166, + 0.011490643, + -0.003946181, + 0.007385422, + 0.026639208, + -0.04234441, + 0.0034119058, + -0.047274653, + 0.0052980212, + 0.0026142208, + 0.025207847, + -0.010755083, + 0.012762964, + 0.031132089, + 0.012474704, + 0.022265606, + 0.04282153, + 0.007355602, + 0.0097561125, + -0.016609745, + -0.058009855, + -0.020893887, + -0.029959168, + -0.000963559, + 0.017255845, + -0.020635447, + -0.009129892, + 0.023597566, + -0.023677086, + -0.011659623, + 0.026420528, + 0.024929527, + 0.0015096379, + 0.007286022, + 0.016013345, + 0.029342888, + -0.012494584, + -0.015387124, + -0.046638492, + 0.0005808689, + -0.009020553, + -0.010506583, + -0.0022961407, + -0.0017320454, + -0.019720966, + 0.015188324, + -0.020734847, + 0.001775533, + -0.0022439556, + 0.029919408, + -0.003777201, + 0.06484858, + -0.003720046, + -0.008150802, + -0.015516344, + 0.013786783, + -0.06635946, + 0.007117042, + -0.0049724863, + 0.00073990895, + 0.011102983, + -0.020098686, + -0.0044879112, + -0.017116684, + 0.028547687, + 0.0058149016, + -0.012603924, + -0.03693705, + -0.020317366, + -0.0024029957, + -0.020933647, + -0.024392767, + -0.003772231, + 0.030654969, + 0.010456883, + -0.030873649, + -0.03606233, + -0.008230322, + -0.000918829, + -0.023696966, + 0.001904753, + -0.04512761, + -0.013438884, + -0.025585568, + -0.029183848, + -0.03375625, + 0.010327663, + 0.03242429, + -0.009050372, + 0.008160742, + -0.03427313, + -0.049183134, + 0.029422408, + -0.021987285, + 0.0025819158, + -0.011262023, + 0.014959704, + -0.0020724905, + -0.030953169, + 0.03453157, + -0.0027384707, + -0.011331603, + 0.060156897, 
+ 0.008339662, + 0.00091448025, + -0.039322652, + -0.0013742053, + 0.03226525, + -0.03321949, + -0.002004153, + 0.06556426, + -0.008946002, + -0.023875887, + 0.0012238629, + 0.0013829028, + 0.013717203, + -0.022961406, + 0.026241608, + -0.024452407, + -0.028189847, + -0.020039046, + -0.0018836305, + -0.021430645, + 0.0052532917, + -0.018488405, + 0.011301783, + -0.0059292116, + 0.018319424, + 0.008066312, + -0.0002029935, + -0.024730727, + 0.010745143, + 0.023836127, + 0.022365006, + -0.018518224, + 0.022702966, + -0.007350632, + -0.0015531254, + -0.014025344, + 0.015407004, + -0.017275725, + -0.0014524829, + 0.006441122, + 0.003921331, + 0.0054570613, + -0.010715323, + -0.032483928, + -0.0026564656, + 0.03584365, + -0.0061429217, + 0.012951824, + 0.030992929, + -0.022504166, + 0.00093436026, + 0.017335365, + 0.04508785, + 0.03492917, + 0.007131952, + -0.0028826008, + -0.014442824, + 0.018269725, + 0.021271605, + -0.016053105, + -0.020108625, + -0.029760368, + -0.0052334117, + 0.040833533, + 0.002319748, + 0.0068337517, + -0.014780784, + 0.006769142, + 0.0054471213, + -0.00095424027, + -0.03910397, + 0.023975287, + -0.015198264, + 0.016381124, + 0.022702966, + -0.025923528, + 0.002024033, + -0.009621923, + -0.004868116, + -0.019899886, + 0.019522166, + 0.028329007, + -0.052284416, + -0.0010331391, + -0.027474167, + 0.014830484, + 0.008906242, + 0.03604245, + -0.023557806, + 0.023359006, + 0.0021731332, + -0.008488762, + 0.03659909, + -0.019750785, + -0.016361244, + 0.011729203, + -0.026559688, + -0.015814545, + 0.010516523, + -0.018180264, + -0.065047376, + 0.011550283, + 0.007619012, + -0.07443074, + -0.006709502, + 0.03789129, + -0.007305902, + -0.027692847, + -0.008583193, + 0.014949764, + -0.032006808, + 0.020078806, + 0.061667778, + 0.014015404, + 0.021788485, + 0.0029571508, + -0.0005147058, + 0.0044307564, + -0.015645564, + -0.07121018, + 0.007355602, + -0.014413004, + 0.03286165, + 0.015804604, + -0.0015481554, + 0.030674849, + -0.03640029, + 0.0097561125, 
+ -0.017673325, + -0.013796723, + 0.03387553, + 0.019790545, + 0.026022928, + 0.009010613, + -0.025744608, + -0.0002453938, + -0.022842126, + 0.058447216, + -0.03719549, + 0.027633207, + -0.006774112, + -0.012146683, + 0.016768785, + -0.0045127613, + 0.009462883, + 0.039362412, + -0.03238453, + -0.004000851, + 0.020416766, + 0.018130565, + 0.0082551725, + 0.003792111, + -0.008220382, + -0.0090354625, + 0.016033225, + -0.00014125675, + -0.0026092508, + 0.015715145, + -0.0044009364, + -0.018776665, + -0.011202383, + 0.019522166, + -0.052841056, + -0.00079892774, + 0.041668493, + 0.03737441, + -0.013230144, + 0.021132445, + 0.004219531, + -0.04278177, + -0.018886006, + -0.021251725, + 0.024154207, + -0.009656712, + 0.04274201, + 0.014611804, + 0.001763108, + 0.0098058125, + -0.0060236417, + -0.0017519254, + 0.010864423, + 0.020675207, + -0.021033047, + 0.003993396, + 0.014452764, + -0.03993893, + 0.025466288, + 0.022524046, + -0.008598102, + 0.000807004, + -0.0023185057, + 0.017295605, + -0.012574104, + -0.023796367, + 0.012822604, + 0.025943408, + -0.004463061, + 0.0006352283, + 0.008439062, + -0.008886362, + -0.03353757, + 0.022563806, + -0.000978469, + -0.0009703928, + 0.0013207778, + -0.00025067443, + -0.047513213, + 0.06437146, + 0.010407183, + 0.0008132165, + -0.0051290416, + 0.029959168, + -0.0090652825, + -0.001760623, + 0.019035105, + 0.04329865, + 0.021887885, + 0.025227727, + 0.0029273308, + 0.030654969, + -0.024810247, + -0.022325246, + 0.024551807, + -0.028229607, + 0.031291127, + 0.029303128, + 0.03347793, + 0.020595687, + 0.022643326, + 0.030615209, + 0.03379601, + 0.006272142, + 0.016152505, + 0.0015730055, + -0.03451169, + 0.019293545, + 0.009159712, + -0.0017071954, + -0.011431003, + 0.03816961, + -0.027454287, + 0.024850007, + -0.016798604, + 0.019333305, + -0.0047314414, + 0.03447193, + -0.006421242, + 0.023557806, + 0.029004928, + -0.022126446, + 0.029124208, + 0.016639564, + 0.023060806, + 0.009527492, + -0.047234893, + 0.008667682, + 
0.029521808, + 0.014104864, + -0.008444033, + 0.019273665, + 0.022484286, + -0.012504524, + 0.030118208, + 0.0024005107, + 0.0024676058, + -0.0082054725, + 0.026142208, + 0.010198443, + -0.001658738, + 0.021172205, + 0.023995167, + -0.009701443, + 0.0025471258, + 0.03282189, + 0.0058049615, + 0.0027956257, + 0.028428407, + -0.0034243308, + -0.0047960514, + 0.024193967, + -0.0139160035, + 0.012802724, + -0.008369482, + -0.011311723, + -0.031768247, + 0.032762248, + 0.0012226204, + 0.019780606, + 0.010496643, + 0.006510702, + -0.009244203, + -0.0060385517, + -0.007748232, + -0.015844364, + -0.020834247, + 0.0068586017, + 0.012057223, + -0.028309127, + 0.009000673, + -0.0058745416, + 0.015665444, + 0.007161772, + 0.024969287, + -0.040754013, + -0.024034927, + 0.012464764, + -0.041072093, + 0.0082154125, + -0.014711204, + 0.040754013, + -0.006441122, + 0.015377184, + -0.03797081, + -0.024571687, + 0.03170861, + -0.017285665, + -0.026122328, + -0.0013307178, + -0.010794843, + 0.006674712, + -0.004239411, + 0.052204896, + -0.023975287, + -0.0023011107, + -0.03347793, + 0.03162909, + 0.03651957, + -0.024512047, + 0.016560044, + 0.007763142, + 0.003367176, + 0.018647445, + -0.03323937, + 0.0077929622, + 0.015695265, + 0.004028186, + 0.011083103, + 0.016092865, + -0.014263904, + 0.030953169, + 0.002411693, + -0.013627743, + -0.017096804, + -0.0105960425, + -0.031151969, + 0.008752173, + 0.007499732, + -0.03703645, + -0.04524689, + -0.017454645, + -0.009318752, + 0.016331425, + 0.017941706, + 0.052761536, + 0.011331603, + 0.058168896, + 0.049103614, + 0.022424646, + 0.03250381, + 0.009537432, + -0.010934003, + -0.021569805, + 0.007236322, + 0.018895945, + -0.0015779755, + -0.020993287, + -0.008970853, + -0.029462168, + 0.008150802, + 0.012842484, + 0.013200324, + 0.00045506575, + -0.011778903, + 0.011540343, + 0.003225531, + 0.027752487, + -0.009522523, + 0.013727143, + 0.014681384, + 0.010516523, + -0.007867512, + 0.017216085, + -0.0047910814, + 0.003976001, + 0.03735453, + 
-0.025068687, + 0.04361673, + 0.04321913, + 0.029541688, + -0.0015208204, + 0.0051290416, + -0.0010256841, + 0.026500048, + -0.00057931576, + 0.023160206, + 0.0090354625, + -0.017454645, + -0.04345769, + -0.001827718, + -0.023498166, + -0.008125952, + 0.006217472, + 0.025486168, + -0.019432705, + 0.010407183, + 0.032006808, + -0.03250381, + 0.019373065, + 0.007107102, + -0.006585252, + 0.049183134, + 0.023597566, + 0.026460288, + 0.021271605, + 0.019263726, + 0.012087043, + 0.010864423, + 0.029601328, + 0.006490822, + 0.006341722, + -0.00012300753, + -0.0009902727, + 0.020476406, + -0.03453157, + 0.026539808, + -0.023796367, + -0.0057950215, + -0.003570946, + -0.013776843, + 0.00049513637, + 0.001666193, + 0.047195133, + 0.020377006, + -0.023875887, + 0.003623131, + 0.025724728, + -0.029482048, + -0.007156802, + -0.012832544, + -0.016609745, + -0.011212323, + 0.049501214, + -0.010034433, + -0.010456883, + -0.013329544, + 0.017335365, + -0.023438526, + 0.008657742, + -0.007723382, + -0.006490822, + 0.0023359007, + 0.0059640016, + -0.0138762435, + -0.0053775413, + 0.03479001, + -0.029243488, + -0.011609923, + -0.006242322, + 0.00025347006, + -0.04294081, + -0.025187967, + 0.029482048, + 0.03759309, + 0.013587983, + 0.018687205, + -0.03371649, + 0.017275725, + 5.8242204e-05, + 0.007102132, + 0.007276082, + -0.009790903, + -0.026122328, + -0.03310021, + 0.014174444, + 0.019482406, + 0.030018808, + 0.013458764, + 0.03622137, + -0.029641088, + 0.017166385, + -0.011371363, + 0.03914373, + 0.026360888, + 0.018995345, + -0.006366572, + -0.027036808, + -0.015913945, + 0.023776487, + 0.006719442, + -0.018150445, + 0.007271112, + -0.012882244, + -0.017295605, + -0.026102448, + 0.015735025, + -0.003754836, + 0.0048258714, + -0.022404766, + -0.040913053, + 0.016251905, + -0.030217608, + -0.028309127, + -0.04397457, + -0.03504845, + -0.012395184, + -0.039402172, + -0.019542046, + 0.03584365, + -0.020555926, + 0.0053029913, + -0.030217608, + 0.013170504, + -0.017424826, + 
0.04357697, + 0.008439062, + 0.012385244, + 0.021410765, + 0.040594973, + 0.009989703, + 0.021748725, + 0.041429933, + -0.019204086, + -0.013737083, + -0.0055564614, + 0.025068687, + 0.04286129, + 0.00016866942, + 0.025048807, + -0.014234084, + 0.0015904005, + -0.024472287, + -0.003556036, + -0.0011983915, + -0.029541688, + -0.030237488, + 0.012444884, + -0.039760012, + 0.000839309, + 0.022365006, + 0.028786248, + -0.03315985, + 0.025744608, + -0.007067342, + -0.012643684, + -0.026102448, + 0.001646313, + 0.019243846, + -0.023875887, + -0.010735203, + -0.017474525, + 0.0077929622, + 0.04425289, + -0.026181968, + -0.017305546, + -0.018687205, + 0.032086328, + 0.009070252, + 0.017872125, + 0.003565976, + 0.0025844008, + 0.011599983, + 0.027692847, + 0.023140326, + -0.004130071, + 0.03496893, + 0.018051045, + -0.03437253, + -0.011122863, + 0.0053825113, + -0.012534344, + 0.007837692, + 0.030515809, + 0.006207532, + -0.03292129, + -0.011252083, + 0.009830663, + -0.010228263, + 0.013468704, + -0.011878303, + -0.008125952, + 0.04317937, + -0.04536617, + 0.003700166, + -0.016440764, + 0.020635447, + 0.0038964811, + -0.028070567, + -0.015049164, + -0.0051340116, + -0.0041027362, + -0.004299051, + -0.054630253, + -0.0048879962, + 0.0139259435, + 0.017116684, + 0.010387303, + 0.012872304, + -0.019780606, + -0.0017892005, + 0.009020553, + 0.010218323, + 0.032642968, + 0.004818416, + -0.026380768, + 0.0058248416, + 0.040952813, + -0.021828245, + -0.014452764, + 0.0011344028, + -0.03194717, + 0.007857572, + -0.012067163, + -0.0047264714, + 0.0022153782, + -0.0030764309, + -0.021052927, + -0.015983524, + 0.00041685888, + -0.022464406, + 0.0089957025, + -0.03974013, + 0.0022663206, + 0.052284416, + -0.020436646, + 0.026758488, + -0.0027484107, + 0.0056757415, + -0.0058795116, + 0.011093043, + 0.023199966, + -0.00015049786, + 0.012444884, + 0.03296105, + -0.010536403, + -0.011321663, + -0.020029105, + 0.007991762, + -0.041867293, + 0.025863888, + -0.003715076, + 0.016490465, + 
0.014333484, + 0.0034566359, + 0.029998928, + -0.003106251, + -0.006257232, + -0.027613327, + -0.023239726, + -0.013230144, + -0.054113377, + 0.03447193, + -0.008001702, + 0.012037343, + 0.009597072, + 0.017554045, + -0.0021905282, + -0.001618978, + 0.0082651125, + -0.0011232203, + -0.007927152, + -0.018478464, + -0.009974793, + -0.019303486, + -0.039600972, + -0.0155859245, + 0.030476049, + 0.017226025, + -0.024313247, + 0.010039403, + 0.004234441, + -0.018707085, + 0.011023463, + -0.018488405, + 0.016798604, + 0.04373601, + -0.024710847, + 0.0069281817, + 0.0028279307, + 0.004015761, + -0.009085163, + 0.0030590359, + -0.0059242416, + -0.010705383, + -0.012653624, + 0.000961074, + 0.011142743, + -0.003352266, + 0.047871053, + -0.04544569, + -0.0015233054, + 0.023875887, + 0.00043021573, + 0.007474882, + -0.017206145, + -0.004443181, + -0.014035284, + 0.018001346, + 0.0011685715, + -0.020635447, + 0.0014760904, + 0.027832007, + 0.015397064, + -0.031489927, + 0.015327484, + 0.014015404, + 0.013518404, + 0.028905528, + -0.024690967, + 0.024810247, + -0.026181968, + -0.015317544, + 0.010993643, + 0.011450883, + 0.030297128, + 0.009920123, + -0.020068865, + 0.0053029913, + 0.015108804, + 0.0007604102, + -0.019144446, + -0.017365186, + 0.003215591, + 0.0025148208, + -0.023975287, + 0.004709076, + -0.021390885, + -0.012027403, + 0.024869887, + -0.028905528, + -0.03638041, + 0.0043015364, + -0.029859768, + 0.006232382, + -0.0069629718, + -0.0024464831, + 0.013120804, + 0.031231489, + 0.0082154125, + 0.0013232628, + 0.0028552657, + -0.009219352, + 0.019144446, + -0.006774112, + 0.0034094208, + -0.03500869, + 0.026221728, + -0.026718728, + 0.03735453, + 0.0155958645, + -0.0059242416, + -0.023677086, + -0.014691324, + 0.013359364, + -0.004299051, + 0.016500404, + 0.009209412, + 0.004085341, + -0.010774963, + -0.004738896, + 0.016490465, + -0.027315127, + 0.021033047, + -0.03445205, + 0.031092329, + 0.006749262, + -0.008806842, + -0.008056372, + -0.04441193, + 0.014184384, + 
-0.0155859245, + 0.016560044, + 0.007375482, + 0.011440943, + -0.026162088, + -0.018120624, + -0.012772904 + ], + "Category": "Boutique", + "Tags": [ + "view", + "air conditioning", + "concierge" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "2", + "HotelName": "Old Century Hotel", + "Description": "The hotel is situated in a nineteenth century plaza, which has been expanded and renovated to the highest architectural standards to create a modern, functional and first-class hotel in which art and unique historical elements coexist with the most modern comforts. The hotel also regularly hosts events like wine tastings, beer dinners, and live music.", + "DescriptionVector": [ + -0.04683398, + -0.01285595, + 0.03386663, + -0.015239983, + -0.0033393162, + -0.014727527, + -0.012042706, + -0.011630513, + 0.0024954358, + -0.037431534, + 0.006550519, + 0.021155503, + -0.06024695, + -0.036050133, + 0.0026764662, + 0.036094695, + -0.06069256, + 0.014025685, + 0.052270465, + 0.01747919, + -0.020620765, + -0.017501472, + -0.04121925, + -0.07085255, + 0.01518428, + 0.013591212, + -0.06412379, + 0.050488014, + 0.020865854, + 0.05596906, + -0.001694724, + -0.020999538, + 0.0724122, + 0.038099956, + -0.023907166, + -0.055077832, + 0.015050597, + 0.011842179, + 0.0164877, + 0.014359896, + -0.032730315, + 0.012087267, + 0.01220981, + -0.011463407, + -0.00083482906, + -0.027271548, + -0.024285937, + 0.049953274, + -0.0077592456, + 0.0072412197, + -0.04284574, + 0.006394554, + -0.0024355564, + -0.0005716386, + -0.039369956, + 0.035426274, + -0.008778586, + -0.04821538, + 0.057439584, + 0.011274022, + 0.055612568, + 0.0020456447, + 0.06715396, + 0.016209193, + -0.06875817, + 0.031839088, + -0.026491724, + 0.029811544, + 0.016342876, + -0.009335604, + 0.038345043, + 0.036339782, + -0.0041637016, + -0.043313634, + -0.03622838, + -0.026825935, + 0.0059099495, + -0.00022872507, + -0.004712363, + 0.015540771, + -0.066619225, + -0.00857806, + -0.034980662, + 0.07210027, + 
0.014215072, + -0.0058709583, + -0.0051830425, + 0.011909021, + -0.030903298, + 0.04516293, + 0.044739597, + 0.046611175, + 0.034133997, + 0.0056509366, + 0.015451649, + -0.017356647, + 0.0119647235, + -0.009001393, + -0.013245862, + 0.06327712, + 0.047235034, + -0.058108002, + 0.021589976, + -0.012310074, + -0.011418846, + -0.00444221, + 0.015195421, + -0.029922947, + 0.014816649, + -0.02078787, + -0.078160614, + -0.030212596, + -0.067822374, + -0.009346744, + -0.028274179, + -0.011246171, + 0.028318739, + -0.016298315, + -0.018660067, + -0.0020957761, + -0.005776265, + 4.8216774e-05, + -0.06185116, + -0.009981743, + -0.0084499465, + -0.0759771, + -0.014772088, + -0.049596786, + -0.008600341, + -0.021122081, + -0.019595854, + -0.0056481515, + 0.035493117, + -0.013535511, + -0.027561197, + 0.03125979, + 0.0224255, + 0.021545414, + -0.034557328, + -0.014315334, + -0.0031749962, + 0.023639798, + -0.023483833, + -0.02272629, + 0.0036484606, + 0.014025685, + 0.031059263, + 0.00037354947, + -0.032752592, + 0.0514238, + 0.00067712367, + 0.040261183, + 0.01584156, + 0.0109453825, + -0.034846976, + 0.012555161, + 0.01847068, + -0.008967972, + -0.037698902, + 0.04915117, + -0.031616278, + 0.0072690705, + -0.025912426, + 0.023684358, + 0.01418165, + 0.04901749, + -0.03792171, + 0.0058709583, + -0.03359926, + 0.022146992, + 0.006812317, + 0.016721647, + -0.030524526, + 0.064480275, + 0.04313539, + 0.04870556, + 0.006333282, + 0.005478261, + 0.019740678, + 0.010817268, + 0.002482903, + 0.01021569, + 0.07824974, + -0.008444376, + -0.027382951, + 0.02108866, + -0.09473743, + 0.010132138, + -0.004305741, + 0.014738667, + -0.012900512, + -0.01418165, + -0.020119451, + 0.031393472, + -0.023171904, + -0.009864769, + -0.021601116, + 0.028942598, + -0.002686214, + -0.010778277, + -0.06465852, + -0.06670834, + 0.009920471, + -0.0036985923, + -0.007642272, + -0.042778898, + 0.016231472, + -0.0059712213, + -0.014560422, + 0.0007401362, + -0.0013333592, + -0.030190317, + 0.03400031, + 
0.026536286, + 0.0050215074, + -0.03658487, + -0.0626087, + 0.031081542, + -0.0051635467, + 0.050220642, + -0.020643046, + -0.017490331, + -0.020921554, + 0.02802909, + -0.017211823, + 0.0021319822, + -0.006823457, + -0.025154883, + -0.024196815, + -0.008940121, + -0.013346125, + -0.03660715, + -0.02108866, + 0.000490523, + -0.012566301, + 0.004812626, + -0.060870808, + 0.0074974475, + 0.03368838, + 0.049284857, + -0.033777505, + -0.011842179, + 0.02898716, + -0.0062664403, + 0.0500424, + 0.0040912894, + -0.019562434, + -0.04580907, + 0.030546807, + 0.03680768, + -0.053161692, + 0.015685596, + -0.0113074435, + -0.010043015, + 0.02996751, + 0.005514467, + -0.03883522, + -0.009636393, + -0.012466039, + -0.012544021, + 0.08306236, + 0.0016195267, + -0.030101193, + -0.004511837, + -0.005776265, + -0.034044873, + -0.00019164862, + 0.040238902, + -0.0070406934, + -0.02704874, + -0.027160143, + -0.017935945, + -0.039748725, + 0.0021528704, + 0.0055590286, + -0.029276809, + 0.004347517, + -0.019818662, + 0.017724277, + -0.014627264, + -0.019016556, + -0.061762035, + 0.037097327, + 0.014393317, + -0.040350303, + 0.043380477, + 0.04901749, + 0.0063555627, + -0.009981743, + -0.005447625, + 0.025600497, + -0.024308218, + 0.006723194, + -0.002704317, + -0.0064781066, + 0.017624015, + 0.01584156, + -0.02736067, + -0.03154944, + -0.009296612, + -0.03197277, + 0.024330499, + 0.040706795, + 0.010410646, + -0.03417856, + 0.036651712, + 0.0021166643, + 6.2229235e-05, + 0.02204673, + 0.021188922, + 0.032195576, + 0.012443758, + 0.009313323, + -0.055879936, + -0.028385581, + 0.002272629, + 0.0013486772, + 0.025823304, + -0.013067616, + -0.03600557, + 0.007480737, + 0.005865388, + 0.005015937, + 0.01747919, + 0.03357698, + -0.0729915, + -0.0030496675, + 0.0073024915, + -0.015696736, + -0.039459076, + 0.0034451496, + -0.01915024, + 0.013914282, + -0.027249267, + -0.038679253, + 0.040149778, + -0.0066674924, + 0.07464027, + 0.02666997, + -0.025979267, + -0.01053876, + -0.055612568, + 
-0.034111716, + -0.019584714, + 0.05641467, + -0.047279596, + -0.007959772, + -0.039035745, + 0.042110477, + 0.03567136, + -0.06697571, + -0.012466039, + 0.015306825, + 0.04382609, + 0.029120844, + -0.0013598176, + 0.051824853, + 0.03785487, + -0.08234938, + 0.0045563984, + -0.035203468, + -0.028452424, + 0.0016195267, + 0.0119424425, + -0.01088411, + -0.0051273406, + -0.009981743, + -0.006450256, + -0.014738667, + -0.04514065, + -0.03894662, + -0.027850846, + 0.0048683276, + -0.00792078, + -0.05904379, + 0.037721183, + 0.030101193, + 0.0001987854, + 0.0069961324, + -0.042444687, + 0.032039613, + -0.0029689001, + 0.012588582, + -0.037075046, + 0.035805047, + 0.0019161381, + 0.023127342, + -0.00824385, + 0.0041748416, + 0.027182424, + 0.019707259, + -0.04710135, + -0.01714498, + -0.03890206, + 0.02370664, + -0.021266906, + 0.034089435, + -0.006227449, + -0.026892776, + -0.011831039, + 0.017501472, + -0.039102588, + 0.0019314562, + -0.048660997, + -0.028073652, + -0.04449451, + 0.014883491, + -0.008656043, + 0.0073916144, + 0.028786633, + -0.027895406, + -0.011235031, + 0.01683305, + -0.009848059, + -0.013736037, + -0.015596474, + 0.025801023, + -0.04342504, + 0.00024056167, + 0.027405232, + 0.04683398, + 0.028764352, + 0.022336379, + 0.013568932, + 0.0010987158, + -0.009318893, + -0.031616278, + -0.03569364, + -0.055256076, + -0.040060654, + -0.00070358196, + -0.020810151, + -0.015373667, + -0.012889371, + -0.04378153, + 0.0659508, + -0.050220642, + 0.00956955, + 0.010633453, + 0.0067733256, + -0.050621696, + -0.012800248, + 0.005043788, + -0.029945228, + 0.007514158, + 0.054498535, + 0.013312704, + 0.07504132, + -0.00010800906, + -0.036540307, + -0.0077592456, + 0.00039513386, + -0.024642428, + -0.021679098, + 0.0028004025, + 0.0026639334, + 0.017701996, + 0.012388056, + 0.00957512, + -0.017122699, + 0.0035844038, + -0.03584961, + -0.023751201, + 0.017222963, + 0.02278199, + -0.036094695, + -0.03847873, + 0.074194655, + 0.01155253, + -0.05659292, + -0.029410493, + 
-0.016632525, + 0.061762035, + 0.023149623, + 0.0061661773, + -0.013502089, + 0.0030524526, + 0.05997958, + 0.0049184593, + 0.012889371, + 0.019495592, + 0.009508278, + 0.0020512147, + -0.00428903, + -0.012633143, + -0.023951726, + -0.042021357, + -7.0279864e-05, + -0.028251898, + -0.015752438, + -0.0052638096, + -0.007998763, + 0.008004333, + -0.0078817895, + -0.001859044, + 0.037052765, + -0.029031722, + -0.02435278, + -0.023550674, + -0.0026639334, + 0.0081992885, + 0.012978494, + 0.020654187, + 0.016120069, + 0.02566734, + 0.06568343, + -0.0031443604, + -0.001451029, + 0.041085567, + -0.004043943, + -0.007798237, + -0.020598484, + 0.003392233, + 0.0113965655, + 0.029254528, + 0.014850071, + 0.019127961, + 0.014359896, + -0.021356028, + -0.013758318, + 0.016231472, + 0.018660067, + -0.01418165, + 0.011162619, + 0.0033198209, + 0.041018724, + -0.029856106, + -0.060514316, + -0.008422095, + -0.021578835, + -0.011246171, + -0.013669195, + 0.00069139723, + -0.023216464, + 0.019261645, + -0.013914282, + 0.018715767, + 0.014783229, + 0.033799786, + 0.004742999, + 0.038099956, + 0.018504102, + -0.037498377, + -0.018069629, + -0.015607614, + -0.019339627, + -0.023327868, + 0.0025650628, + -0.009307752, + 0.0030134614, + 0.05841993, + 0.014215072, + -0.038345043, + 0.0022489557, + -0.0092242, + -0.0062441593, + 0.023149623, + 0.045051526, + 0.0026179794, + 0.045385737, + -0.015574193, + 0.0048989635, + -0.004999227, + 0.036362063, + 0.02602383, + -0.0269819, + 0.007436176, + 0.05075538, + 0.01681077, + -8.929677e-05, + -0.019306205, + 0.020353397, + -0.0048544025, + 0.043514162, + 0.006021353, + 0.029455055, + 0.0010799165, + 0.0044867713, + -0.013624634, + 0.018225593, + 0.022291817, + -0.0060436334, + 0.016342876, + 0.020030327, + -0.019963486, + -0.015897263, + 0.06523782, + 0.049685907, + 0.01549621, + 0.02936593, + 0.010020734, + -0.0016989015, + -0.0031248648, + -0.028318739, + 0.0230605, + -0.005166332, + -0.0051440513, + 0.007319202, + -0.030791894, + 
0.068045184, + -0.014738667, + -0.0230605, + 0.006879159, + 0.009803497, + -0.0082215695, + 0.04585363, + -0.0029772553, + -0.017189542, + -0.010705865, + -0.025645059, + -0.00084318436, + -0.017111558, + -0.008739595, + -0.04875012, + -0.04121925, + 0.00010339626, + -0.035649084, + -0.010583322, + 0.014983755, + -0.008979113, + -0.0077536753, + 0.010605602, + 0.008383105, + 0.02666997, + 0.0066117905, + 0.031036982, + 0.013301563, + -0.002193254, + -0.014560422, + 0.025221726, + 0.042868022, + 0.016242612, + 0.03092558, + -0.024441902, + -0.028920317, + 0.035181187, + 0.03892434, + -0.0054337, + 0.010004024, + -0.02671453, + 0.0079040695, + 0.019250505, + 0.030212596, + 4.560576e-05, + 0.009731085, + 0.025132602, + 0.02929909, + 0.020921554, + 0.018927434, + -0.01055547, + -0.029276809, + 0.028942598, + 0.03560452, + -0.006032493, + 0.0017685287, + -0.0056592915, + -0.032685753, + -0.011040075, + -0.007970911, + 0.025466813, + 0.00892341, + 0.054765902, + -0.012120687, + -0.01810305, + 0.0019091754, + 0.020119451, + -0.0013347517, + -0.00230048, + -0.05240415, + 0.03304224, + 0.0718329, + 0.03195049, + 0.04776977, + 0.02435278, + 0.03137119, + 0.013680335, + -0.017378928, + -0.026803654, + -0.00013011567, + -0.010393935, + -0.003963175, + 0.018203313, + -0.02961102, + 0.0041637016, + 0.014916913, + 0.0039325394, + -0.020954976, + 0.06880273, + 0.022291817, + 0.00724679, + -0.00088426436, + 0.0041024294, + 0.008906701, + 1.8549097e-05, + 0.035827328, + -0.03166084, + 0.00888442, + -0.02466471, + -0.02802909, + -0.04222188, + 0.018983137, + 0.0062720105, + -0.0045731086, + -0.014549281, + 0.012020425, + 0.011630513, + 0.021835063, + -0.014749807, + 0.030769615, + 0.013791738, + 0.025845584, + -0.02867523, + 0.00033890997, + -0.0050215074, + 0.049195733, + 0.03569364, + -0.024575585, + 0.013903142, + -0.0052220332, + -0.017189542, + 0.021055238, + 0.00956955, + -0.013969984, + -0.043581, + -0.021333747, + -0.014939194, + -0.0018270154, + -0.009719945, + -0.015262263, 
+ 0.05828625, + -0.009853629, + 0.0049936567, + 0.0058375373, + -0.0027015319, + 0.053161692, + -0.01483893, + 0.03362154, + 0.011190469, + -0.008082315, + 0.005639796, + -0.014226212, + -0.024686988, + -0.0047959154, + -0.03322049, + 0.024597866, + -0.044717316, + 0.016086647, + -0.020542784, + -0.05690485, + 0.025600497, + -0.058152564, + 0.007051834, + 0.019930065, + 0.0049546654, + -0.0024230236, + -0.03988241, + 0.022536904, + 0.062163085, + 0.006160607, + 0.021211203, + -0.012933932, + 0.008327403, + -0.006121616, + 0.029232247, + 0.003924184, + -0.007447316, + 0.04516293, + 0.013658054, + -0.022692868, + -0.04349188, + 0.016053228, + -0.022169273, + -0.010483058, + 0.010360515, + -0.0006656352, + -0.013346125, + 0.027494354, + -0.038411885, + 0.0060436334, + -0.015975244, + 0.031415753, + -0.030747334, + -0.017746558, + 0.0052276035, + 0.026313478, + -0.007157667, + -0.030479966, + -0.015507351, + 0.016710507, + -0.0035426274, + 0.0055534584, + -0.023974007, + 0.010772707, + -0.0064224047, + -0.0016042087, + 0.015540771, + 0.03400031, + -0.01615349, + -0.027115583, + 0.023327868, + 0.015407087, + -0.014515861, + -0.0151174385, + -0.02209129, + -0.0001199327, + 0.026424881, + 0.01151911, + 0.024129972, + 0.00826613, + -0.022859974, + -0.01219867, + -0.031126104, + -0.0027307754, + -0.034735575, + 0.04549714, + -0.012744547, + 0.004812626, + -0.029544177, + 0.014415598, + 0.014081387, + -0.024486464, + -0.0020428596, + -0.004784775, + 0.03789943, + -0.016276034, + 0.018225593, + 0.007948631, + -0.018804891, + -0.0067343344, + -0.029833825, + -0.009530559, + 0.024776112, + -0.052894324, + 0.028118214, + 0.0037320133, + 0.017891383, + 0.008745166, + 0.0336661, + 0.011374285, + 0.0054504103, + 0.040729076, + -0.009530559, + -0.02936593, + -0.0020038683, + 0.0053585027, + -0.011274022, + 0.045185212, + -0.014994895, + -0.009012533, + 0.028140495, + 0.01677735, + -0.0058208266, + 0.04407118, + -0.025778743, + 0.004060653, + 0.003328176, + 0.0072579305, + 
0.013412967, + 0.0032028472, + 0.015986385, + -0.026781373, + 0.028831195, + 0.01155253, + -0.034423646, + -0.009508278, + 0.022648307, + -0.024597866, + -0.032730315, + 0.054008357, + -0.021935325, + -0.01449358, + -0.015941823, + 0.012978494, + 0.009185209, + 0.0010569396, + 0.015596474, + -0.0009692094, + 0.040016096, + 0.008979113, + -0.05164661, + -0.0017142196, + -0.009753366, + 0.009686524, + 0.035493117, + -0.0052526696, + 0.028185055, + -0.045296613, + 0.016342876, + -0.029744703, + 0.02967786, + 0.008138017, + 0.003258549, + 0.017701996, + -0.012376916, + -0.01744577, + 0.004250039, + -0.002126412, + -0.0034869257, + -0.018804891, + 0.050265204, + -0.030903298, + -0.0067510447, + 0.003553768, + 0.0062553, + -0.010276962, + 0.020275416, + -0.017000156, + 0.028764352, + -0.012878231, + -0.00312765, + 0.024753831, + -0.021032957, + -0.03400031, + -0.018593224, + -0.030457685, + 0.020487081, + -0.019640416, + 0.016755069, + 0.024597866, + -0.0063499925, + 0.009430296, + -0.0007902677, + -0.042756617, + 0.028719792, + 0.019618135, + 0.028898036, + 0.00856692, + 0.0027293828, + -0.0021431225, + -0.02929909, + 0.01976296, + 0.0030524526, + 0.010639023, + -0.011118057, + 0.043670125, + 0.02339471, + 0.0063555627, + 0.014861211, + 0.03587189, + -0.023550674, + 0.0016264893, + 0.036785398, + -0.020854713, + 0.00823828, + 0.030457685, + -0.028073652, + 0.032173295, + -0.003294755, + 0.023974007, + 0.0064446856, + 0.012365775, + 0.034356803, + -0.013747177, + -0.017624015, + 0.017646296, + -0.0038796228, + -0.0005267291, + -0.0018855022, + -0.036139257, + 0.018236734, + -0.03752066, + 0.018593224, + 0.010678014, + -0.012777967, + -0.0057149935, + 0.0007888752, + 0.0045285476, + 0.05302801, + -0.07432833, + -0.014749807, + -0.039080307, + -0.013479809, + -0.011574811, + 0.0056648618, + -0.024865234, + 0.048883803, + 0.032396104, + 0.006812317, + -0.02410769, + 0.0015693951, + -0.0056453664, + 0.00016606066, + 0.048037138, + -0.010505339, + -0.010505339, + 0.025890145, 
+ 0.0021848988, + 0.03649575, + 0.050265204, + 0.006494817, + -0.0026207645, + -0.045185212, + 0.02736067, + 0.009029244, + -0.014014545, + -0.026068391, + 0.004392078, + -0.04616556, + -0.013446388, + 0.0027753366, + 0.03324277, + -0.0044394247, + -0.008644902, + -0.03433452, + -0.029878387, + 0.017278664, + -0.00954727, + 0.00989262, + 0.029722422, + -0.0056509366, + 0.025177164, + 0.046566613, + -0.010098716, + -0.012677705, + -0.02410769, + -0.017557172, + 0.021823922, + -0.0008215999, + -0.041687146, + 0.017356647, + 0.009786787, + 0.030457685, + 0.041286092, + 0.009926042, + -0.013379546, + 5.1611096e-05, + 0.031415753, + 0.0021835063, + 0.026246637, + 0.015618754, + 0.013880861, + 0.0049713757, + -0.011184899, + 0.024620147, + -0.01217639, + 0.00858363, + -0.026291197, + 0.003857342, + -0.02470927, + 0.015908403, + 0.0041609164, + 0.0029828255, + 0.0071353866, + -0.02406313, + -0.013791738, + 0.032819435, + -0.0073971846, + 0.037832588, + 0.025065761, + -0.016755069, + 0.006628501, + 0.030078912, + -0.038122237, + 0.008310692, + -0.013424108, + -0.025979267, + -0.023461552, + -0.016342876, + 0.03495838, + -0.024219096, + 0.022269536, + -0.005015937, + -0.013646914, + -0.0409296, + -0.017969366, + 0.0028004025, + -0.03065821, + 0.017434629, + -0.02539997, + 0.009018104, + 0.0072356495, + 0.029633299, + 0.00071576674, + 0.010031874, + 0.0075308685, + -0.017334366, + -0.0013208264, + -0.00079444534, + 0.009474858, + 0.029477334, + -0.0014273558, + 0.001399505, + 0.013468669, + -0.0003241142, + -0.00061237044, + -0.014047966, + 0.02635804, + -0.01811419, + -0.008527929, + 0.014270773, + -0.01217639, + 0.0015220487, + -0.013947703, + -0.0023130127, + 0.00027520116, + 0.012109548, + 0.00020818507, + 0.024486464, + -0.016933315, + 0.04079592, + 0.004149776, + -0.01946217, + 0.01414823, + -0.016632525, + -0.039815567, + 0.0041274955, + -0.0009845274, + -0.009118367, + 0.015518491, + -0.011909021, + 0.022815412, + -0.009313323, + 0.016175771, + 0.033710662, + 
0.020888133, + 0.048170824, + 0.011023365, + 0.01910568, + -0.00021131829, + 0.00510506, + -0.011090207, + -0.009279901, + -0.0110122245, + 0.037832588, + 0.0059879315, + -0.010639023, + 0.02733839, + 0.00988705, + 0.006400124, + 0.020230854, + -0.009218629, + -0.00024386897, + 0.017055858, + -0.020008048, + -0.018336996, + 0.006550519, + 0.0076979734, + 0.048571873, + -0.0018743619, + -0.012878231, + 0.03616154, + -0.0051440513, + 0.01910568, + -0.0028742072, + 0.007775956, + 0.00084318436, + 0.034445927, + 0.04315767, + 0.035002943, + 0.022291817, + -0.007313632, + 0.0045898193, + 0.0066452115, + -0.008806437, + 0.0054225596, + 0.019952346, + -0.005798546, + -0.01153025, + -0.00477085, + -0.040729076, + -0.00049504876, + 0.0069070095, + -0.019985767, + -0.011374285, + 0.020810151, + 0.019807521, + -0.061271857, + 0.010293673, + 0.0119647235, + -0.014861211, + 0.030413123, + -0.00955841, + 0.0014635619, + -0.01053876, + 0.006895869, + -0.019974627, + 0.009497138, + 0.021222344, + 0.032752592, + 0.011652794, + -0.0013723504, + 0.003364382, + 0.00826056, + 0.012332355, + 0.029254528, + 0.024820672, + -0.008160298, + -0.0031248648, + -0.014192791, + 0.02406313, + 0.019729538, + 0.041999076, + 0.017222963, + -0.042957142, + 0.0065672295, + 0.052270465, + -0.013546651, + -0.0048516174, + -0.0062107383, + 0.008466657, + 0.010120997, + -0.008349683, + 0.016621385, + -0.022146992, + -0.0388575, + -0.020386819, + -0.041352935, + 0.007658982, + -0.04438311, + -0.013346125, + 0.0064224047, + -0.0066452115, + -0.021333747, + -0.011251741, + 0.030836456, + -0.012421477, + 0.0024090982, + -0.008310692, + -0.0068568783, + -0.012978494, + 0.0020665326, + 0.009825778, + 0.029098563, + 0.037297852, + 0.05111187, + -0.028318739, + -0.012343494, + -0.021901906, + -0.015295684, + 0.019818662, + 0.0006558874, + -0.004080149, + -0.054855026, + 0.03099242, + 0.0052136783, + -0.014415598, + 0.008371964, + 0.012142968, + -6.162e-05, + -0.0067677554, + -0.041642584, + 0.02504348, + 
0.027494354, + -0.012956213, + -0.009441436, + -0.002783692, + -0.00034935403, + 0.018682348, + 0.025533656, + -0.01811419, + 0.02273743, + 0.015351386, + -0.035136625, + -0.011402136, + 0.002501006, + -0.047502402, + -0.037052765, + 0.00957512, + 9.190779e-05, + -0.01812533, + -0.014972614, + -0.010672444, + 0.0036373204, + -0.002637475, + -0.021545414, + -0.00239378, + 0.012699985, + 0.024508744, + 0.0042695347, + 0.015607614, + -0.01252174, + -0.0020707103, + -0.0061494666, + 0.051869415, + -0.01483893, + -0.013424108, + 0.019740678, + -0.010416216, + 0.00758657, + -0.014326475, + -0.003659601, + -0.022146992, + -0.007174378, + 0.00923534, + 0.015796999, + 0.026402602, + 0.03097014, + -0.005397494, + 0.027182424, + -0.0077258246, + 0.023751201, + -0.010622312, + -0.009357884, + -0.06060344, + -0.026424881, + 0.0036094696, + 0.0052526696, + -0.042645212, + -0.0077926666, + -0.010639023, + -0.023951726, + 0.0072022285, + 0.023684358, + -0.013747177, + -0.043714687, + -0.014103668, + 0.005071639, + -0.026959619, + -0.004578679, + -0.023639798, + -0.0032084174, + 0.018916294, + 0.0012024603, + 0.015696736, + -0.035938732, + -0.017980505, + -0.0027934397, + 0.037676625, + -0.029499615, + 0.0057818354, + 0.0016473775, + 0.006416835, + 0.0056620766, + -0.01776884, + 0.006494817, + 0.003094229, + -0.007581, + -0.010070866, + -0.005670432, + 0.036852237, + -0.03558224, + 0.014103668, + -0.017880242, + -0.037119605, + -0.01715612, + -0.0055061122, + -0.020353397, + 0.011374285, + -0.024174534, + 0.013591212, + 0.02441962, + -0.039191708, + 0.00789293, + 0.03359926, + 0.0065393783, + -0.018615505, + -0.03417856, + -0.041352935, + -0.028095933, + -0.019818662, + -0.00019634845, + -0.035136625, + -0.022882255, + -0.007842798, + 0.020219713, + -0.006728764, + 0.04875012, + -0.0052805203, + 0.008282841, + -0.016120069, + 0.012432617, + -0.010070866, + 0.03567136, + -0.0103995055, + 0.01940647, + 0.051200993, + -0.008795297, + 0.02473155, + -0.0016613029, + -0.011017795, + 
-0.017590594, + -0.02470927, + 0.0009037599, + -0.004974161, + 0.013947703, + 0.025199445, + -0.047591522, + -0.023773482, + 0.011463407, + 0.0018492962, + -0.012878231, + -0.02078787, + 0.012878231, + 0.005770695, + 0.0125328805, + -0.037386976, + 0.003294755, + 0.017612875, + 0.009068235, + 0.012544021, + -0.020186292, + 0.03259663, + 0.020164011, + 0.011686214, + 0.017902523, + 0.00034413202, + -0.00029643744, + -0.009140647, + 0.038389605, + -0.009363454, + 0.013268143, + -0.013858581, + 0.005375213, + 0.034780137, + 0.025177164, + -0.0026848214, + -0.028853476, + 0.029031722, + -0.013457528, + -0.014894632, + 0.031215228, + 0.008394245, + 0.018994275, + 0.018593224, + 0.02076559, + -0.012666564, + 0.005999072, + -0.01646542, + 0.004392078, + -0.01549621, + 0.025310848, + -0.01681077, + -0.003425654, + -0.0042806747, + -0.012332355, + 0.030390842, + 0.011251741, + 0.038010832, + -0.009848059, + -0.0015582548, + 0.019350767, + 0.0197741, + -0.021133222, + 0.065326944, + 0.037943993, + -0.011073496, + -0.03723101, + 0.03226242, + 0.020509362, + 0.02566734, + 0.02666997, + 0.0015610398, + 0.01679963, + -0.021333747, + 0.022503482, + 0.007937491, + -0.014192791, + 0.010193409, + -0.010254681, + -0.03259663, + 0.025555935, + 0.023194185, + 0.0050883493, + -0.00012419737, + -0.005464336, + 0.014671825, + -0.0028212906, + 0.029544177, + -0.0072635002, + -0.001176002, + 0.023595236, + -0.014281914, + 0.021266906, + 0.0017323226, + 0.018582083, + -0.046655737, + 0.020197432, + -0.039102588, + -0.0012783537, + -0.03295312, + 0.000485301, + 0.0071298163, + -0.005035433, + 0.004305741, + 0.009719945, + -0.04079592, + 0.011190469, + 0.010995514, + -0.0019161381, + -0.03132663, + -0.027739441, + -0.0004964413, + 0.032485224, + -0.027249267, + 0.00791521, + -0.00626087, + -0.004587034, + 0.016331736, + -0.03433452, + 0.02441962, + 0.00954727, + 0.017746558, + 0.043068547, + -0.0010820053, + -0.031126104, + 0.072055705, + 0.008650472, + 0.0047290735, + 0.0015109084, + 
-0.0066507817, + 0.026090672, + 0.00759214, + -0.008973543, + -0.023528393, + -0.039035745, + -0.044360828, + -0.00958069, + 0.0016543402, + 0.014816649, + 0.0074584563, + -0.0040996443, + -0.0017782765, + 0.008873279, + -0.012488319, + 0.01646542, + 0.025177164, + 0.03257435, + -0.0059099495, + -0.0107392855, + 0.01546279, + -0.029187685, + -0.035092063, + -0.018515242, + -0.00070706336, + 0.018437259, + -0.022057869, + 0.004252824, + -0.021913044, + 0.00691815, + -0.018570943, + 0.01979638, + -0.00064056943, + -0.017568313, + -0.020253135, + 0.009926042, + -0.019317346 + ], + "Category": "Boutique", + "Tags": [ + "pool", + "free wifi", + "air conditioning", + "concierge" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "3", + "HotelName": "Gastronomic Landscape Hotel", + "Description": "The Gastronomic Hotel stands out for its culinary excellence under the management of William Dough, who advises on and oversees all of the Hotel\u2019s restaurant services.", + "DescriptionVector": [ + -0.048865054, + -0.020307425, + 0.017633565, + 0.023875887, + -0.04401433, + -0.021689085, + -0.04437217, + 0.011500583, + 0.03840817, + 0.00029058976, + 0.016907945, + -0.009214383, + -0.04512761, + 0.019889945, + 0.020973407, + 0.023040926, + -0.026539808, + 0.050495215, + 0.07152826, + -0.008786962, + -0.009994673, + -0.0053129313, + -0.014601864, + -0.048069853, + 0.021231845, + 0.022066806, + -0.018021226, + -0.010526463, + 0.07220418, + 0.0068685417, + 0.009472823, + -0.023239726, + 0.040276892, + 0.03399481, + 0.0156058045, + -0.001837658, + -0.009567252, + -0.03630089, + 0.009010613, + 0.027672967, + -0.023398766, + 0.030078448, + 0.018428765, + -0.006709502, + -0.03598281, + -0.018021226, + -0.017782666, + 0.06655826, + -0.019909825, + 0.010963823, + -0.028428407, + 0.007325782, + -0.030833889, + -0.045724012, + -0.0780489, + 0.024253607, + 0.018220024, + -0.022762606, + 0.056777295, + 0.007817812, + 0.03355745, + 0.029163968, + 0.031967048, + 0.029959168, + 
-0.051568735, + 0.057294175, + -0.0156157445, + 0.03759309, + -0.046002332, + -0.020396886, + 0.053278416, + 0.016371185, + 0.03170861, + -0.015685324, + 0.0010555041, + 0.024094567, + 0.0051886817, + 0.012872304, + 0.004055521, + -0.03315985, + -0.013568103, + -0.023359006, + -0.072243944, + 0.026480168, + 0.025068687, + 0.009010613, + -0.018090805, + -0.025207847, + 0.009408212, + 0.0025123358, + 0.024591567, + -0.003725016, + -0.0053924513, + -0.025227727, + -0.055385694, + 0.012136743, + -0.011709323, + -0.041310653, + -0.021828245, + 0.04373601, + 0.030217608, + 0.023199966, + -0.012912064, + 0.020277606, + 0.021609565, + -0.031887528, + 0.014164504, + -0.062264178, + 0.03315985, + 0.0034218458, + -0.07550426, + 0.007653802, + -0.04544569, + -0.030973049, + -0.0029298158, + 0.041708253, + 0.053198896, + -0.03379601, + -0.010834603, + 0.025168087, + -0.031569447, + -0.023836127, + -0.025088567, + -0.009935033, + 0.0017009829, + -0.03395505, + 0.03174837, + -0.030814009, + -0.0155958645, + -0.0030192758, + 0.009477792, + -0.024830127, + -0.046757773, + 0.0055216714, + -0.015069044, + 0.024015047, + 0.015735025, + -0.020655327, + -0.020357126, + 0.015287724, + 0.003705136, + -0.03389541, + -0.026142208, + -0.041390173, + -0.03705633, + 0.06818842, + 0.03186765, + 0.007181652, + -0.012802724, + 0.030694729, + 0.025366887, + 0.064729296, + 0.029680848, + -0.011639743, + -0.0016351305, + 0.0029944258, + 0.021788485, + -0.017921826, + -0.03486953, + 0.040992573, + -0.021629445, + 0.03576413, + -0.07232346, + 0.004868116, + 0.055783294, + 0.031112209, + -0.046121612, + -0.049262654, + -0.04500833, + -0.023021046, + 0.03538641, + -0.020536046, + 0.006500762, + 0.031808008, + 0.03359721, + 0.052920576, + -0.017812485, + -0.014949764, + 0.028845888, + 0.019780606, + 0.019999286, + -0.020874007, + 0.0865973, + -0.057691775, + 0.019442646, + 0.03190741, + -0.079122424, + 0.046519212, + 0.018170325, + 0.012196383, + 0.013448824, + 0.009865453, + -0.0850069, + 0.0057204715, 
+ -0.03270261, + 0.051727775, + -0.03242429, + -0.041151613, + 0.012902124, + 0.0003308157, + -0.011937943, + 0.0045102765, + 0.018617624, + -0.016401004, + -0.018369125, + 0.009716352, + 0.0052185017, + -0.024850007, + 0.019880006, + 0.03294117, + -0.004353721, + -0.04373601, + 0.019134505, + 0.0693017, + -0.016222084, + -0.03570449, + -0.050018094, + 0.003702651, + -0.028448287, + 0.047791533, + 0.00023576444, + 0.0012723204, + 0.0047712014, + 0.028030807, + -0.026162088, + 0.06846674, + -0.0069281817, + -0.025963288, + -0.004067946, + 0.011848483, + 0.0010604741, + -0.013090984, + -0.024174087, + -0.029541688, + -0.014224144, + 0.04238417, + 0.007236322, + 0.0034392409, + -0.03447193, + -0.013001524, + -0.03357733, + 0.007017642, + -0.008697502, + 0.011450883, + 0.030058568, + -0.019154385, + -0.014104864, + -0.022822246, + -0.011013523, + 0.024631327, + -0.0059391516, + 0.03238453, + 0.03644005, + -0.028925408, + 0.020774607, + -0.0029447258, + 0.0016053105, + 0.015426884, + 0.041946813, + 0.025426527, + 0.019094745, + -0.000408472, + 0.056061614, + -0.024492167, + -0.012385244, + -0.046996333, + -0.054868814, + 0.030694729, + 0.00025517851, + -0.059918337, + -0.045843292, + 0.0029571508, + -0.0068486617, + -0.03745393, + 0.03638041, + -0.031092329, + 0.0055167014, + 3.5877198e-05, + -0.042145614, + -0.0138861835, + -0.022086686, + -0.03785153, + 0.07232346, + -0.013031344, + -0.018657384, + -0.006461002, + -0.013826543, + 0.029422408, + -0.023716846, + 0.007141892, + -0.0025309732, + 0.0026788306, + 0.011659623, + -0.03838829, + -0.00011531956, + -0.007922182, + 0.022881886, + -0.06938122, + 0.002265078, + -0.0021681632, + -0.023736726, + 0.0750669, + 0.03610209, + -0.014820544, + -0.018041106, + 0.061429217, + 0.003287656, + -0.029800128, + 0.020436646, + 0.022941526, + -0.0022812306, + 0.020237846, + -0.019184206, + -0.0716873, + -0.022066806, + -0.039879292, + 0.014701264, + -0.0058447216, + -0.032245368, + 0.0060137017, + 0.010049343, + -0.021470405, + 
-0.0050147315, + 0.007718412, + 0.057413455, + -0.023657206, + 0.011798783, + 0.025943408, + -0.009199472, + -0.0021818306, + 0.040952813, + -0.032682728, + 0.018190205, + -0.0026639206, + 0.022444526, + 0.016629625, + -0.015466644, + -0.014800664, + 0.024512047, + 0.0016475555, + 0.014512404, + -0.058327936, + -0.012653624, + -0.010049343, + 0.064331695, + -0.025983168, + -0.010337603, + -0.017971525, + -0.013677443, + -0.010993643, + -0.056817055, + -0.027593447, + -0.009542403, + 0.010009583, + 0.014422944, + 0.014850364, + 0.007609072, + 0.054550733, + -0.011073163, + 0.039839532, + -0.024452407, + -0.024929527, + 0.017822426, + -0.007151832, + 0.014760904, + 0.007256202, + -0.045724012, + 0.009646772, + -0.027692847, + 0.017395005, + -0.007678652, + 0.0056459215, + 0.013220204, + 0.009607012, + -0.064013615, + 0.017116684, + -0.001591643, + 0.008886362, + -0.04234441, + -0.041310653, + -0.0020849155, + -0.04294081, + 0.013478644, + -0.028388647, + -0.010526463, + 0.022265606, + -0.004798536, + -0.014870244, + -0.027573567, + 0.057015855, + 0.04492881, + -0.011560223, + 0.0049749715, + -0.008364513, + -0.011540343, + 0.010228263, + 0.015029284, + 0.052960336, + -0.021331245, + -0.0029397558, + 0.058407456, + -0.04341793, + -0.011083103, + -0.022524046, + 0.03178813, + -0.014661504, + -0.03381589, + -0.055226654, + -0.0069679418, + 0.0030714609, + -0.03801057, + -0.023796367, + 0.008453973, + -0.019015225, + 0.024273487, + 0.007400332, + 0.04262273, + -0.026818128, + -0.0122659635, + 0.004773686, + 0.047155373, + 0.03572437, + 0.03767261, + 0.03482977, + 0.012941884, + 0.018597744, + 0.0012580316, + -0.012216263, + -0.07781034, + -0.03226525, + 0.015019344, + -0.0059987917, + 0.023120446, + 0.057135135, + -0.019989345, + 0.060196657, + -0.020416766, + 0.012434944, + -0.024392767, + -0.021072807, + -0.057612255, + -0.016659444, + -0.04242393, + -0.03741417, + 0.023040926, + 0.051250655, + 0.012434944, + 0.062184658, + 0.011480703, + 0.0044829412, + 0.0021992256, 
+ -0.017723026, + -0.020327305, + 0.021132445, + -0.03870637, + 0.07005714, + 0.019383006, + -0.016540164, + 0.03753345, + -0.03630089, + 0.018786605, + -0.010755083, + -0.022186086, + -0.0048109614, + 0.052284416, + 0.023418646, + -0.011997583, + 0.017563986, + 0.024671087, + -0.013130744, + -0.04520713, + -0.030376649, + 0.04298057, + 0.04266249, + 0.018667325, + -0.03809009, + -0.029621208, + 0.053397696, + 0.041787773, + -0.018776665, + 0.015347364, + -0.004224501, + -0.03628101, + -0.030654969, + -0.024333127, + -0.004423301, + -0.050813295, + 0.021689085, + -0.002314778, + 0.016748905, + 0.008260142, + 0.020476406, + 0.052801296, + -0.004714046, + 0.015814545, + -0.0106358025, + -0.049103614, + -0.027513927, + -0.030913409, + -0.016301604, + -0.008329722, + 0.040833533, + 0.03230501, + -0.050614495, + 0.014492524, + 0.03178813, + -0.011361423, + -0.024472287, + 0.017375125, + -0.018895945, + -0.032245368, + 0.017544106, + -0.017315485, + 0.010437003, + -0.024333127, + 0.020317366, + 0.010685503, + 0.012484644, + 0.011629803, + -0.015874185, + 0.009602043, + 0.001994213, + -0.023895767, + 0.030575449, + 0.036121972, + 0.039680492, + -0.03906421, + -0.028726608, + 0.026818128, + 0.027275367, + -0.018548045, + -0.03502857, + -0.014949764, + 0.03822925, + 0.010347543, + 0.0016599805, + -0.009626892, + -0.021708965, + 0.011669563, + 0.0053328113, + 0.060554497, + 0.003690226, + -0.0015096379, + -0.030257368, + -0.026261488, + -0.0060137017, + -0.03385565, + -0.010054313, + 0.024909647, + -0.024154207, + 0.011232203, + 0.0012617591, + 0.008881393, + 0.03846781, + 0.014939824, + 0.009318752, + 0.048030093, + -0.016351305, + -0.014234084, + 0.019621566, + -0.055504974, + 0.023677086, + -0.021013167, + 0.001827718, + 0.03508821, + 0.003188256, + 0.025585568, + 0.04262273, + 0.0028428407, + -0.026639208, + 0.023319246, + -0.014353364, + -0.014661504, + -0.003752351, + 0.0155958645, + 0.010526463, + 0.012096983, + 0.010745143, + 0.013498524, + 0.049739774, + 0.03357733, 
+ -0.016162444, + 0.023279486, + 0.021152325, + -0.04298057, + 0.013975644, + 0.018329365, + 0.025187967, + -0.017563986, + 0.03592317, + 0.006396392, + -0.007166742, + 0.04512761, + 0.0057055615, + -0.010675563, + 0.006267172, + -0.04302033, + -0.0021656782, + -0.03371649, + 0.026838008, + -0.028567567, + 0.0015270329, + 0.003747381, + -0.011102983, + 0.004674286, + 0.025963288, + -0.008419182, + 0.023100566, + 0.010397243, + -0.025923528, + 0.012723204, + 0.0028030807, + 0.022782486, + 0.011699383, + -0.029979048, + 0.03618161, + -0.0041524363, + -0.007822782, + 0.019363126, + -0.026678968, + -0.048825294, + 0.041787773, + 0.03250381, + -0.0009753628, + 0.0054620313, + 0.017086865, + -0.011679503, + -0.022245726, + -0.010153713, + -0.029641088, + 0.013289784, + 0.023696966, + 0.048109613, + -0.003876601, + -0.0017991405, + 0.020714967, + 0.03528701, + -0.027494047, + -0.017822426, + -0.000420897, + -0.018418824, + 0.024531927, + 0.040634733, + 0.028150087, + -0.010755083, + -0.008439062, + 0.04461073, + 0.007499732, + -0.025863888, + 0.030555569, + -0.028746488, + 0.0027633207, + -0.0028080507, + -0.008106072, + -0.0053924513, + 0.0058099316, + 0.031450167, + 0.023498166, + 0.011490643, + -0.003946181, + 0.007385422, + 0.026639208, + -0.04234441, + 0.0034119058, + -0.047274653, + 0.0052980212, + 0.0026142208, + 0.025207847, + -0.010755083, + 0.012762964, + 0.031132089, + 0.012474704, + 0.022265606, + 0.04282153, + 0.007355602, + 0.0097561125, + -0.016609745, + -0.058009855, + -0.020893887, + -0.029959168, + -0.000963559, + 0.017255845, + -0.020635447, + -0.009129892, + 0.023597566, + -0.023677086, + -0.011659623, + 0.026420528, + 0.024929527, + 0.0015096379, + 0.007286022, + 0.016013345, + 0.029342888, + -0.012494584, + -0.015387124, + -0.046638492, + 0.0005808689, + -0.009020553, + -0.010506583, + -0.0022961407, + -0.0017320454, + -0.019720966, + 0.015188324, + -0.020734847, + 0.001775533, + -0.0022439556, + 0.029919408, + -0.003777201, + 0.06484858, + 
-0.003720046, + -0.008150802, + -0.015516344, + 0.013786783, + -0.06635946, + 0.007117042, + -0.0049724863, + 0.00073990895, + 0.011102983, + -0.020098686, + -0.0044879112, + -0.017116684, + 0.028547687, + 0.0058149016, + -0.012603924, + -0.03693705, + -0.020317366, + -0.0024029957, + -0.020933647, + -0.024392767, + -0.003772231, + 0.030654969, + 0.010456883, + -0.030873649, + -0.03606233, + -0.008230322, + -0.000918829, + -0.023696966, + 0.001904753, + -0.04512761, + -0.013438884, + -0.025585568, + -0.029183848, + -0.03375625, + 0.010327663, + 0.03242429, + -0.009050372, + 0.008160742, + -0.03427313, + -0.049183134, + 0.029422408, + -0.021987285, + 0.0025819158, + -0.011262023, + 0.014959704, + -0.0020724905, + -0.030953169, + 0.03453157, + -0.0027384707, + -0.011331603, + 0.060156897, + 0.008339662, + 0.00091448025, + -0.039322652, + -0.0013742053, + 0.03226525, + -0.03321949, + -0.002004153, + 0.06556426, + -0.008946002, + -0.023875887, + 0.0012238629, + 0.0013829028, + 0.013717203, + -0.022961406, + 0.026241608, + -0.024452407, + -0.028189847, + -0.020039046, + -0.0018836305, + -0.021430645, + 0.0052532917, + -0.018488405, + 0.011301783, + -0.0059292116, + 0.018319424, + 0.008066312, + -0.0002029935, + -0.024730727, + 0.010745143, + 0.023836127, + 0.022365006, + -0.018518224, + 0.022702966, + -0.007350632, + -0.0015531254, + -0.014025344, + 0.015407004, + -0.017275725, + -0.0014524829, + 0.006441122, + 0.003921331, + 0.0054570613, + -0.010715323, + -0.032483928, + -0.0026564656, + 0.03584365, + -0.0061429217, + 0.012951824, + 0.030992929, + -0.022504166, + 0.00093436026, + 0.017335365, + 0.04508785, + 0.03492917, + 0.007131952, + -0.0028826008, + -0.014442824, + 0.018269725, + 0.021271605, + -0.016053105, + -0.020108625, + -0.029760368, + -0.0052334117, + 0.040833533, + 0.002319748, + 0.0068337517, + -0.014780784, + 0.006769142, + 0.0054471213, + -0.00095424027, + -0.03910397, + 0.023975287, + -0.015198264, + 0.016381124, + 0.022702966, + -0.025923528, + 
0.002024033, + -0.009621923, + -0.004868116, + -0.019899886, + 0.019522166, + 0.028329007, + -0.052284416, + -0.0010331391, + -0.027474167, + 0.014830484, + 0.008906242, + 0.03604245, + -0.023557806, + 0.023359006, + 0.0021731332, + -0.008488762, + 0.03659909, + -0.019750785, + -0.016361244, + 0.011729203, + -0.026559688, + -0.015814545, + 0.010516523, + -0.018180264, + -0.065047376, + 0.011550283, + 0.007619012, + -0.07443074, + -0.006709502, + 0.03789129, + -0.007305902, + -0.027692847, + -0.008583193, + 0.014949764, + -0.032006808, + 0.020078806, + 0.061667778, + 0.014015404, + 0.021788485, + 0.0029571508, + -0.0005147058, + 0.0044307564, + -0.015645564, + -0.07121018, + 0.007355602, + -0.014413004, + 0.03286165, + 0.015804604, + -0.0015481554, + 0.030674849, + -0.03640029, + 0.0097561125, + -0.017673325, + -0.013796723, + 0.03387553, + 0.019790545, + 0.026022928, + 0.009010613, + -0.025744608, + -0.0002453938, + -0.022842126, + 0.058447216, + -0.03719549, + 0.027633207, + -0.006774112, + -0.012146683, + 0.016768785, + -0.0045127613, + 0.009462883, + 0.039362412, + -0.03238453, + -0.004000851, + 0.020416766, + 0.018130565, + 0.0082551725, + 0.003792111, + -0.008220382, + -0.0090354625, + 0.016033225, + -0.00014125675, + -0.0026092508, + 0.015715145, + -0.0044009364, + -0.018776665, + -0.011202383, + 0.019522166, + -0.052841056, + -0.00079892774, + 0.041668493, + 0.03737441, + -0.013230144, + 0.021132445, + 0.004219531, + -0.04278177, + -0.018886006, + -0.021251725, + 0.024154207, + -0.009656712, + 0.04274201, + 0.014611804, + 0.001763108, + 0.0098058125, + -0.0060236417, + -0.0017519254, + 0.010864423, + 0.020675207, + -0.021033047, + 0.003993396, + 0.014452764, + -0.03993893, + 0.025466288, + 0.022524046, + -0.008598102, + 0.000807004, + -0.0023185057, + 0.017295605, + -0.012574104, + -0.023796367, + 0.012822604, + 0.025943408, + -0.004463061, + 0.0006352283, + 0.008439062, + -0.008886362, + -0.03353757, + 0.022563806, + -0.000978469, + -0.0009703928, + 
0.0013207778, + -0.00025067443, + -0.047513213, + 0.06437146, + 0.010407183, + 0.0008132165, + -0.0051290416, + 0.029959168, + -0.0090652825, + -0.001760623, + 0.019035105, + 0.04329865, + 0.021887885, + 0.025227727, + 0.0029273308, + 0.030654969, + -0.024810247, + -0.022325246, + 0.024551807, + -0.028229607, + 0.031291127, + 0.029303128, + 0.03347793, + 0.020595687, + 0.022643326, + 0.030615209, + 0.03379601, + 0.006272142, + 0.016152505, + 0.0015730055, + -0.03451169, + 0.019293545, + 0.009159712, + -0.0017071954, + -0.011431003, + 0.03816961, + -0.027454287, + 0.024850007, + -0.016798604, + 0.019333305, + -0.0047314414, + 0.03447193, + -0.006421242, + 0.023557806, + 0.029004928, + -0.022126446, + 0.029124208, + 0.016639564, + 0.023060806, + 0.009527492, + -0.047234893, + 0.008667682, + 0.029521808, + 0.014104864, + -0.008444033, + 0.019273665, + 0.022484286, + -0.012504524, + 0.030118208, + 0.0024005107, + 0.0024676058, + -0.0082054725, + 0.026142208, + 0.010198443, + -0.001658738, + 0.021172205, + 0.023995167, + -0.009701443, + 0.0025471258, + 0.03282189, + 0.0058049615, + 0.0027956257, + 0.028428407, + -0.0034243308, + -0.0047960514, + 0.024193967, + -0.0139160035, + 0.012802724, + -0.008369482, + -0.011311723, + -0.031768247, + 0.032762248, + 0.0012226204, + 0.019780606, + 0.010496643, + 0.006510702, + -0.009244203, + -0.0060385517, + -0.007748232, + -0.015844364, + -0.020834247, + 0.0068586017, + 0.012057223, + -0.028309127, + 0.009000673, + -0.0058745416, + 0.015665444, + 0.007161772, + 0.024969287, + -0.040754013, + -0.024034927, + 0.012464764, + -0.041072093, + 0.0082154125, + -0.014711204, + 0.040754013, + -0.006441122, + 0.015377184, + -0.03797081, + -0.024571687, + 0.03170861, + -0.017285665, + -0.026122328, + -0.0013307178, + -0.010794843, + 0.006674712, + -0.004239411, + 0.052204896, + -0.023975287, + -0.0023011107, + -0.03347793, + 0.03162909, + 0.03651957, + -0.024512047, + 0.016560044, + 0.007763142, + 0.003367176, + 0.018647445, + -0.03323937, + 
0.0077929622, + 0.015695265, + 0.004028186, + 0.011083103, + 0.016092865, + -0.014263904, + 0.030953169, + 0.002411693, + -0.013627743, + -0.017096804, + -0.0105960425, + -0.031151969, + 0.008752173, + 0.007499732, + -0.03703645, + -0.04524689, + -0.017454645, + -0.009318752, + 0.016331425, + 0.017941706, + 0.052761536, + 0.011331603, + 0.058168896, + 0.049103614, + 0.022424646, + 0.03250381, + 0.009537432, + -0.010934003, + -0.021569805, + 0.007236322, + 0.018895945, + -0.0015779755, + -0.020993287, + -0.008970853, + -0.029462168, + 0.008150802, + 0.012842484, + 0.013200324, + 0.00045506575, + -0.011778903, + 0.011540343, + 0.003225531, + 0.027752487, + -0.009522523, + 0.013727143, + 0.014681384, + 0.010516523, + -0.007867512, + 0.017216085, + -0.0047910814, + 0.003976001, + 0.03735453, + -0.025068687, + 0.04361673, + 0.04321913, + 0.029541688, + -0.0015208204, + 0.0051290416, + -0.0010256841, + 0.026500048, + -0.00057931576, + 0.023160206, + 0.0090354625, + -0.017454645, + -0.04345769, + -0.001827718, + -0.023498166, + -0.008125952, + 0.006217472, + 0.025486168, + -0.019432705, + 0.010407183, + 0.032006808, + -0.03250381, + 0.019373065, + 0.007107102, + -0.006585252, + 0.049183134, + 0.023597566, + 0.026460288, + 0.021271605, + 0.019263726, + 0.012087043, + 0.010864423, + 0.029601328, + 0.006490822, + 0.006341722, + -0.00012300753, + -0.0009902727, + 0.020476406, + -0.03453157, + 0.026539808, + -0.023796367, + -0.0057950215, + -0.003570946, + -0.013776843, + 0.00049513637, + 0.001666193, + 0.047195133, + 0.020377006, + -0.023875887, + 0.003623131, + 0.025724728, + -0.029482048, + -0.007156802, + -0.012832544, + -0.016609745, + -0.011212323, + 0.049501214, + -0.010034433, + -0.010456883, + -0.013329544, + 0.017335365, + -0.023438526, + 0.008657742, + -0.007723382, + -0.006490822, + 0.0023359007, + 0.0059640016, + -0.0138762435, + -0.0053775413, + 0.03479001, + -0.029243488, + -0.011609923, + -0.006242322, + 0.00025347006, + -0.04294081, + -0.025187967, + 
0.029482048, + 0.03759309, + 0.013587983, + 0.018687205, + -0.03371649, + 0.017275725, + 5.8242204e-05, + 0.007102132, + 0.007276082, + -0.009790903, + -0.026122328, + -0.03310021, + 0.014174444, + 0.019482406, + 0.030018808, + 0.013458764, + 0.03622137, + -0.029641088, + 0.017166385, + -0.011371363, + 0.03914373, + 0.026360888, + 0.018995345, + -0.006366572, + -0.027036808, + -0.015913945, + 0.023776487, + 0.006719442, + -0.018150445, + 0.007271112, + -0.012882244, + -0.017295605, + -0.026102448, + 0.015735025, + -0.003754836, + 0.0048258714, + -0.022404766, + -0.040913053, + 0.016251905, + -0.030217608, + -0.028309127, + -0.04397457, + -0.03504845, + -0.012395184, + -0.039402172, + -0.019542046, + 0.03584365, + -0.020555926, + 0.0053029913, + -0.030217608, + 0.013170504, + -0.017424826, + 0.04357697, + 0.008439062, + 0.012385244, + 0.021410765, + 0.040594973, + 0.009989703, + 0.021748725, + 0.041429933, + -0.019204086, + -0.013737083, + -0.0055564614, + 0.025068687, + 0.04286129, + 0.00016866942, + 0.025048807, + -0.014234084, + 0.0015904005, + -0.024472287, + -0.003556036, + -0.0011983915, + -0.029541688, + -0.030237488, + 0.012444884, + -0.039760012, + 0.000839309, + 0.022365006, + 0.028786248, + -0.03315985, + 0.025744608, + -0.007067342, + -0.012643684, + -0.026102448, + 0.001646313, + 0.019243846, + -0.023875887, + -0.010735203, + -0.017474525, + 0.0077929622, + 0.04425289, + -0.026181968, + -0.017305546, + -0.018687205, + 0.032086328, + 0.009070252, + 0.017872125, + 0.003565976, + 0.0025844008, + 0.011599983, + 0.027692847, + 0.023140326, + -0.004130071, + 0.03496893, + 0.018051045, + -0.03437253, + -0.011122863, + 0.0053825113, + -0.012534344, + 0.007837692, + 0.030515809, + 0.006207532, + -0.03292129, + -0.011252083, + 0.009830663, + -0.010228263, + 0.013468704, + -0.011878303, + -0.008125952, + 0.04317937, + -0.04536617, + 0.003700166, + -0.016440764, + 0.020635447, + 0.0038964811, + -0.028070567, + -0.015049164, + -0.0051340116, + -0.0041027362, + 
-0.004299051, + -0.054630253, + -0.0048879962, + 0.0139259435, + 0.017116684, + 0.010387303, + 0.012872304, + -0.019780606, + -0.0017892005, + 0.009020553, + 0.010218323, + 0.032642968, + 0.004818416, + -0.026380768, + 0.0058248416, + 0.040952813, + -0.021828245, + -0.014452764, + 0.0011344028, + -0.03194717, + 0.007857572, + -0.012067163, + -0.0047264714, + 0.0022153782, + -0.0030764309, + -0.021052927, + -0.015983524, + 0.00041685888, + -0.022464406, + 0.0089957025, + -0.03974013, + 0.0022663206, + 0.052284416, + -0.020436646, + 0.026758488, + -0.0027484107, + 0.0056757415, + -0.0058795116, + 0.011093043, + 0.023199966, + -0.00015049786, + 0.012444884, + 0.03296105, + -0.010536403, + -0.011321663, + -0.020029105, + 0.007991762, + -0.041867293, + 0.025863888, + -0.003715076, + 0.016490465, + 0.014333484, + 0.0034566359, + 0.029998928, + -0.003106251, + -0.006257232, + -0.027613327, + -0.023239726, + -0.013230144, + -0.054113377, + 0.03447193, + -0.008001702, + 0.012037343, + 0.009597072, + 0.017554045, + -0.0021905282, + -0.001618978, + 0.0082651125, + -0.0011232203, + -0.007927152, + -0.018478464, + -0.009974793, + -0.019303486, + -0.039600972, + -0.0155859245, + 0.030476049, + 0.017226025, + -0.024313247, + 0.010039403, + 0.004234441, + -0.018707085, + 0.011023463, + -0.018488405, + 0.016798604, + 0.04373601, + -0.024710847, + 0.0069281817, + 0.0028279307, + 0.004015761, + -0.009085163, + 0.0030590359, + -0.0059242416, + -0.010705383, + -0.012653624, + 0.000961074, + 0.011142743, + -0.003352266, + 0.047871053, + -0.04544569, + -0.0015233054, + 0.023875887, + 0.00043021573, + 0.007474882, + -0.017206145, + -0.004443181, + -0.014035284, + 0.018001346, + 0.0011685715, + -0.020635447, + 0.0014760904, + 0.027832007, + 0.015397064, + -0.031489927, + 0.015327484, + 0.014015404, + 0.013518404, + 0.028905528, + -0.024690967, + 0.024810247, + -0.026181968, + -0.015317544, + 0.010993643, + 0.011450883, + 0.030297128, + 0.009920123, + -0.020068865, + 0.0053029913, + 
0.015108804, + 0.0007604102, + -0.019144446, + -0.017365186, + 0.003215591, + 0.0025148208, + -0.023975287, + 0.004709076, + -0.021390885, + -0.012027403, + 0.024869887, + -0.028905528, + -0.03638041, + 0.0043015364, + -0.029859768, + 0.006232382, + -0.0069629718, + -0.0024464831, + 0.013120804, + 0.031231489, + 0.0082154125, + 0.0013232628, + 0.0028552657, + -0.009219352, + 0.019144446, + -0.006774112, + 0.0034094208, + -0.03500869, + 0.026221728, + -0.026718728, + 0.03735453, + 0.0155958645, + -0.0059242416, + -0.023677086, + -0.014691324, + 0.013359364, + -0.004299051, + 0.016500404, + 0.009209412, + 0.004085341, + -0.010774963, + -0.004738896, + 0.016490465, + -0.027315127, + 0.021033047, + -0.03445205, + 0.031092329, + 0.006749262, + -0.008806842, + -0.008056372, + -0.04441193, + 0.014184384, + -0.0155859245, + 0.016560044, + 0.007375482, + 0.011440943, + -0.026162088, + -0.018120624, + -0.012772904 + ], + "Category": "Suite", + "Tags": [ + "restaurant", + "bar", + "continental breakfast" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "4", + "HotelName": "Sublime Palace Hotel", + "Description": "Sublime Palace Hotel is located in the heart of the historic center of Sublime in an extremely vibrant and lively area within short walking distance to the sites and landmarks of the city and is surrounded by the extraordinary beauty of churches, buildings, shops and monuments. 
Sublime Cliff is part of a lovingly restored 19th century resort, updated for every modern convenience.", + "DescriptionVector": [ + -0.05908324, + 0.007784768, + 0.027741672, + -0.007171661, + -0.027066693, + -0.017335733, + -0.014467069, + -0.03316401, + 0.0033411505, + -0.017594475, + 0.0030992825, + 0.00602982, + -0.018033212, + -0.02180185, + -0.0069016693, + 0.05395339, + -0.0099728275, + 0.023466801, + 0.05804827, + 0.06452808, + -0.02112687, + -0.046708606, + -0.012149638, + -0.011530906, + -0.007773518, + 0.025784232, + -0.045516144, + -0.027269186, + 0.054538373, + 0.027021695, + 0.0035492692, + -0.024434272, + 0.06308812, + 0.011137168, + 0.019023184, + 0.0043283086, + 0.00014492733, + -0.013240855, + 0.0066485517, + -0.0035548941, + -0.012633373, + -0.026864199, + -0.001743698, + -0.008364126, + -0.02483926, + 0.023646794, + -0.016593255, + 0.06268313, + -0.0013612094, + 0.03617892, + 0.012138388, + -0.011868396, + -0.02920413, + -0.03386149, + -0.02951912, + -0.011542156, + 0.016615754, + -0.041376267, + 0.029789113, + 0.00015046426, + 0.024254277, + -0.007014166, + 0.004055504, + 0.03660641, + -0.067093, + 0.036696404, + -0.049903512, + 0.023601796, + 0.0068566706, + -0.015918275, + 0.019428171, + 0.0022274335, + -0.01409583, + 0.0017633849, + 0.026909199, + 0.0051185973, + 0.029766612, + 0.034581468, + -0.012228386, + -0.042343736, + -0.042096246, + 0.0267967, + -0.00058568566, + 0.08225755, + -0.02040689, + 0.0015960461, + -0.04733859, + 0.03982381, + -0.032691527, + 0.031701554, + 0.06808297, + -0.01509705, + -0.055573344, + -0.01741448, + -0.034401473, + -0.02987911, + 0.012127139, + -0.0035323948, + -0.0368314, + 0.0536834, + 0.036741406, + -0.09971703, + -0.0013823025, + -0.04958852, + 0.015142049, + 0.013128359, + 0.048418555, + 0.0013963646, + 0.01040594, + -0.03660641, + -0.09305722, + 0.01273462, + -0.04517865, + 0.008679116, + -0.004381744, + 0.010186572, + 0.03327651, + 0.004845793, + -0.015592035, + 0.013848337, + -0.021565607, + 
0.008515996, + -0.023241807, + -0.0137470905, + 0.013297103, + -0.046933603, + 0.038698845, + -0.058813248, + -0.016222017, + -0.044661168, + -0.010642183, + -0.030351596, + -0.043783695, + -0.015209546, + 0.02618922, + 0.036291417, + 0.00054314785, + -0.012768369, + -0.03219654, + -0.039036337, + -0.0035633312, + 0.0038558226, + -0.032691527, + 0.0069860416, + -0.025289247, + 0.03552644, + -0.02751668, + -0.013218356, + -0.036403913, + 0.047563583, + 0.013915835, + 0.03863135, + -0.017943215, + -0.008487872, + -0.056518316, + -0.023916787, + 0.01573828, + -0.035076454, + -0.021621855, + 0.056788307, + -0.046843603, + -0.019158179, + -0.04088128, + -0.030014105, + 0.011289039, + 0.0141408285, + -0.045763634, + 0.022713073, + -0.056833304, + -0.014005832, + 0.037416384, + -0.0040639415, + 0.005188908, + 0.041646257, + 0.025986725, + -0.00163542, + 0.029001635, + -0.0070647895, + -0.020103151, + 0.02411928, + 0.0058160764, + -0.023399303, + 0.08320252, + 0.006839796, + 0.040498793, + 0.0013471473, + -0.022094341, + 0.019551918, + -0.01407333, + 0.037776373, + 0.012464629, + -0.029766612, + -0.040431295, + 0.032309037, + -0.01943942, + 0.011902145, + -0.036066424, + 0.047788575, + -0.02848415, + -0.027561678, + -0.042793725, + -0.016075771, + 0.040431295, + 0.0017858843, + 0.004474554, + -0.047923572, + 0.04832856, + 0.014107079, + -0.015569536, + 0.017391982, + -0.011401535, + 0.003405836, + -0.009505967, + 0.022420581, + 0.028574148, + 0.00089223904, + -0.008386625, + -0.009460968, + 0.0071097882, + 0.034671467, + -0.023939285, + -0.010372191, + 0.014489568, + 0.02985661, + 0.0023539923, + 0.03311901, + 0.0067272997, + -0.015704531, + 0.00804351, + -0.009775959, + 0.026661705, + -0.028011665, + -0.035863932, + -0.0045983004, + -0.032309037, + -0.0044323676, + 0.015805779, + -0.01056906, + 0.022915566, + -0.00073966547, + 0.028056664, + -0.005388589, + 0.015817028, + 0.005020163, + 0.03226404, + -0.0155020375, + 0.028281657, + -0.007272908, + 0.0026042974, + 
0.033096515, + -0.014905806, + 0.024636766, + 0.015569536, + -0.0301491, + 0.0103384415, + 0.014467069, + -0.033411503, + -0.03959882, + -0.00071540836, + -0.020913126, + 0.03890134, + 2.0005507e-05, + -0.011182167, + 0.00074177474, + 0.022656824, + -0.03719139, + -0.0085047465, + 0.024929257, + -0.020508138, + -0.023511799, + -0.033726495, + -0.011924645, + 0.020204397, + -0.03226404, + 0.04124127, + -0.014455819, + 0.0041398765, + -0.021329364, + -0.023376804, + 0.032016546, + -0.021385612, + -0.032646526, + 0.0032877144, + 0.00095059664, + -0.027269186, + 0.0027083568, + 0.03185905, + 0.008060385, + -0.011120293, + 0.014467069, + 0.021104371, + -0.002588829, + 0.0066541764, + -0.011564655, + -0.067093, + -0.010096574, + -0.0135895945, + -0.019968154, + 0.0018618195, + 0.030171601, + 0.0015651096, + -0.000103356295, + 0.038856342, + 0.026751703, + -0.00072208786, + 0.01878694, + -0.01777447, + -0.03485146, + -0.015400791, + 0.004840168, + 0.014107079, + -0.013510847, + -0.0045392397, + -0.047383588, + -0.018291954, + -0.024996756, + -0.021363113, + 0.023601796, + -0.016615754, + -0.048823543, + 0.093147226, + 0.02046314, + 0.0029305376, + 0.026211718, + 0.012149638, + -0.013364602, + -0.0010926237, + 0.0072672833, + -0.006918544, + -0.02411928, + 0.03318651, + -0.035661437, + 0.034288976, + 0.0021810287, + 0.015490788, + 0.00094567495, + -0.010782803, + 0.035053954, + 0.031094072, + 0.024884257, + -0.031994045, + -0.03156656, + -0.00318928, + 0.013229606, + 0.022341834, + -0.013882086, + -0.054223385, + 0.0068172966, + 0.06529305, + 0.013207106, + -0.06686801, + -0.06308812, + -0.036403913, + -0.018280705, + 0.031206569, + 0.032331537, + 0.03455897, + 0.020744381, + -0.04661861, + 0.023534298, + -0.0057007675, + -0.009635338, + 0.0064910566, + 0.033703994, + 0.028619146, + 0.014815808, + -0.029316626, + 0.028371654, + -0.00905598, + 0.028821642, + -0.072627835, + -0.027854169, + 0.0012810555, + 0.006766673, + -0.015693283, + 0.023939285, + 0.04227624, + 
-0.015367042, + -0.036021426, + -0.039216332, + 0.018854437, + -0.03887884, + 0.04333371, + 0.0028700707, + 0.058228265, + -0.034153983, + 0.035053954, + -0.03262403, + -0.036696404, + 0.023624295, + -0.0036730154, + -0.038811345, + 0.011401535, + -0.0117558995, + 0.027921667, + 0.009162852, + 0.037078895, + 0.031049075, + -0.015592035, + -0.010647807, + 0.015828278, + -0.036088925, + -0.0031639682, + -0.073032826, + 0.0027392933, + -0.05431338, + -0.026549209, + -0.060208205, + -0.023309305, + -0.011969643, + -0.03419898, + -0.03419898, + 0.0234893, + 0.0048289187, + -0.012903365, + -0.0018772878, + -0.024411771, + -0.02245433, + 0.0704229, + -0.0012648841, + 0.055618342, + -0.007846641, + 0.045831133, + 0.011789649, + 0.033636495, + 0.05098348, + -5.387535e-05, + -0.066103026, + -0.027786672, + -0.015018302, + 0.013882086, + -0.04796857, + 0.036696404, + 0.0006243564, + -0.016840748, + 0.050848484, + -0.025064252, + -0.002796948, + -0.03998131, + -0.02886664, + -0.030306596, + -0.058273263, + -0.06403309, + 0.016638255, + -0.0013970677, + 0.05161346, + 0.030711584, + 0.031724054, + -0.02276932, + -0.027561678, + -0.021228118, + 0.0054645245, + -0.012543376, + -0.004603925, + 0.033231508, + 0.018426951, + 0.039306328, + -0.032286536, + 0.020418141, + 0.005022975, + 0.057283293, + -0.010366566, + -0.007863516, + 0.020361893, + 0.027944166, + -0.028101662, + -0.009927829, + 0.05075849, + 0.0017957278, + -0.038158864, + -0.013364602, + -0.027359184, + 0.03212904, + 0.0067272997, + -0.011491532, + -0.072987825, + -0.01644701, + 0.039351325, + 0.023331804, + 0.022893067, + -0.040003806, + -0.025986725, + -0.0006162707, + -0.004345183, + -0.02077813, + -0.03795637, + -0.0040920656, + 0.03883384, + -0.07978262, + -0.030126601, + -0.0035155201, + -0.0037405135, + -0.0014322229, + -0.008589119, + -0.007278533, + 0.064303085, + -0.061153177, + 0.0027491369, + -0.014692062, + 0.020249397, + 0.014703312, + 0.01205964, + -0.040363796, + 0.008009762, + 0.027359184, + 
0.014444569, + -0.015614535, + 0.019540668, + 0.02479426, + -0.03154406, + 0.024749262, + -0.020620635, + 0.024096781, + -0.008296628, + 0.016469508, + 0.011305913, + -0.022341834, + -0.015052051, + 0.0108953, + -0.015727032, + 0.023736792, + -0.004927353, + -0.0056023328, + 0.0030430343, + 0.0032905268, + 0.06295312, + -0.041668758, + -0.0184832, + 0.019585665, + -0.011879646, + -0.020710632, + -0.07073789, + -0.02515425, + 0.044661168, + -0.001587609, + 0.015862027, + 0.023196809, + 0.010737805, + 0.023061812, + -0.018865688, + 0.013915835, + 0.032151543, + 0.008251629, + -0.008639743, + -0.00871849, + -0.015637035, + 0.009865955, + -0.005183283, + 0.019900657, + 0.038721345, + 0.043221213, + 0.013252105, + 0.017718222, + -0.015524537, + -0.011902145, + -0.01709949, + 0.009770334, + 0.040656287, + -0.027899168, + 0.016379511, + 0.0020122838, + -0.0022668075, + 0.021149369, + 0.0009787208, + 0.016233265, + 0.015850777, + 0.02283682, + 0.036853902, + 0.009832207, + 0.014557066, + 0.012014641, + 0.013634593, + -0.0094497185, + 0.016537007, + -0.014152078, + -0.01816821, + 0.005664206, + -0.051793456, + -0.017200736, + -0.0009618463, + 0.007914139, + -0.010647807, + 0.026279217, + -0.009235974, + -0.023241807, + -0.02077813, + 0.015052051, + 0.011699651, + -0.005683893, + 0.03390649, + 0.01088405, + 0.022656824, + 0.020969374, + -0.028461652, + 0.024501769, + 0.015445789, + -0.023241807, + -0.007076039, + -0.039013837, + -0.002074157, + -0.02147561, + 0.0041258144, + -0.0054898364, + 0.0027603866, + -0.002393366, + 0.031994045, + -0.009359721, + 0.027111692, + 0.006344811, + -0.01984441, + 0.013859587, + -0.005568584, + 0.014905806, + 0.015355792, + -0.05858825, + 0.0034086483, + 0.0010933268, + -0.026009224, + -0.013634593, + -0.016390761, + -0.00920785, + 0.004547677, + 0.0039992556, + 0.013803339, + 0.0006879873, + 0.050353497, + 0.011401535, + -0.04452617, + -0.034288976, + -0.0028335094, + 0.019068182, + 0.044571172, + 0.026751703, + -0.030104103, + 
-0.013240855, + 0.0058554504, + 0.036583908, + -0.019326923, + -0.0038501977, + 0.0005192423, + -0.004210187, + 0.025649235, + 0.015648283, + -0.013634593, + 0.0060579446, + 0.015862027, + 0.026549209, + -0.019270675, + 0.048283562, + -0.031994045, + -0.011857146, + 0.024749262, + 0.00888161, + -0.0066598016, + -0.015727032, + 0.024884257, + -0.012892116, + 0.01943942, + 0.00687917, + 0.020823129, + -0.036943898, + -0.001269806, + -0.008949108, + 0.004677048, + 0.001615733, + -0.015670782, + 0.015884526, + -0.010349692, + -0.016998243, + 0.014455819, + 0.068982944, + 0.012554626, + 0.011733401, + 0.022094341, + -5.6380155e-05, + 0.05264843, + 0.0060916934, + -0.07001791, + -0.0047361087, + 0.0038080115, + 0.004952665, + 0.027089192, + -0.03525645, + -0.001378787, + -0.0038980087, + -0.007711645, + -0.0065585542, + 0.031949047, + -0.009517216, + -0.020834379, + 0.011469033, + -0.0063841846, + 0.020496888, + -0.0042608106, + -0.00770602, + -0.007886015, + -0.032669026, + -0.032804023, + 0.0070647895, + -0.026234217, + 0.008397874, + -0.022904318, + 0.022589326, + -0.035728935, + 0.0010891082, + -0.021531858, + -0.0054757744, + -0.04292872, + 0.013240855, + -0.022578077, + 0.020069402, + -6.429008e-05, + 0.008150382, + -0.016357012, + -0.017043242, + 0.031206569, + -0.036313917, + 0.02076688, + 0.023106812, + -0.038743846, + -0.00071927544, + 0.04832856, + -0.009151602, + 0.0055517093, + -0.014995803, + 0.0022316521, + -0.027989166, + -0.020530637, + 0.015670782, + 0.07478777, + 0.0071941605, + -0.01641326, + -0.013454599, + -0.008470997, + 0.0036589534, + -0.012779619, + -0.03426648, + 0.034963958, + -0.0035689562, + -0.014568316, + 0.007436028, + -0.021565607, + -0.021340614, + -0.0028742894, + 0.03284902, + -0.011609654, + -0.015468289, + -0.024569267, + -0.049903512, + 0.031004075, + -0.010979673, + 0.0063223117, + 0.018370703, + -0.0011706682, + 0.03527895, + -0.03485146, + 0.016885746, + 0.0071885358, + -0.013533346, + 0.027044194, + 0.002027752, + -0.006069194, 
+ 0.0025592986, + 0.046573613, + 0.028934138, + -0.0016986993, + -0.021531858, + 0.03896884, + -0.010141573, + -0.029429123, + -0.033748996, + -0.011767149, + -0.031206569, + 0.006294187, + 0.02884414, + -0.015468289, + -0.0060579446, + -0.020688133, + -0.0043901815, + 0.001385115, + 0.034738965, + 0.0075260256, + 0.029789113, + -0.022195589, + 0.0011390286, + 0.03818136, + 0.0031724055, + -0.026954196, + 0.031139072, + 0.028056664, + 0.03246653, + 0.003718014, + -0.01240838, + -0.024996756, + 0.01642451, + 0.005967947, + 0.02720169, + -0.061603162, + 0.0056867055, + 0.019585665, + 0.026571708, + 0.005776703, + 0.00906723, + -0.03554894, + -0.00887036, + 0.038766343, + -0.040993776, + 0.02280307, + 0.055798337, + 0.0024453958, + 0.014129579, + -0.0006855264, + -0.0033608372, + -0.0022091528, + 0.028731644, + -0.023939285, + 0.004536427, + 0.0009745022, + -0.010321567, + -0.010332817, + -0.029316626, + -0.012025892, + 0.014579565, + 0.010214696, + 0.0033242758, + -0.012565875, + -0.008594744, + 0.020204397, + -0.013578345, + -0.05188345, + -0.042073745, + -0.015187047, + -0.008769114, + 0.029001635, + -0.017009493, + 0.014455819, + -0.021340614, + 0.032399032, + 0.010951549, + 0.014827058, + 0.0026872635, + 0.038653847, + -0.016840748, + 0.008954733, + 0.024569267, + -0.039711315, + 0.026729204, + -0.014647063, + -0.015198297, + 0.02954162, + 0.031409062, + 0.0017886966, + 0.037731376, + 0.0043367455, + -0.013702092, + 0.026346715, + 0.005804827, + 0.011969643, + 0.0052873422, + -0.029654115, + -0.054178383, + 0.0018561947, + 0.0026057037, + -0.023579298, + -0.039846312, + 0.03883384, + 0.008527245, + -0.019315675, + 0.008487872, + -0.0049020415, + -0.047158595, + 0.014973303, + 0.0012016048, + -0.017560726, + 0.013454599, + 0.01709949, + 0.005804827, + 0.037708875, + 0.006440433, + -0.043446206, + -0.007076039, + -0.020620635, + -0.0020825942, + 0.02983411, + 0.011778398, + 0.0078072674, + -0.021745602, + 0.049273532, + -0.0051270346, + -0.014287074, + 
0.0155020375, + -0.022735571, + 0.0029164755, + 0.030689085, + -0.03755138, + 0.045493644, + 0.013128359, + 0.031724054, + -0.018078212, + 0.00022024734, + -0.030036604, + -0.002812416, + 0.007481027, + -0.03422148, + 0.030599087, + 0.03660641, + -0.022263085, + 0.012093389, + 0.02879914, + -0.004778295, + 0.01274587, + -0.022566827, + -0.025761733, + 0.015963275, + -0.0012782431, + 0.008133507, + 0.00285179, + 0.010630933, + -0.007492277, + 0.028349156, + -0.0335465, + 0.017594475, + -0.01122154, + 0.0060354453, + 0.043356206, + -0.004755796, + -0.019034432, + 0.04092628, + 0.026616706, + -0.03282652, + 0.0027055442, + 0.0109178, + -0.007756644, + 0.011542156, + 0.03991381, + 0.026954196, + 0.025986725, + 0.028731644, + 0.04468367, + 0.017155739, + 0.04661861, + -0.0014350354, + -0.0068341712, + 0.020721883, + 0.01644701, + -0.04868855, + 0.029901609, + 0.011272164, + 0.013038361, + -0.020305645, + 0.007239159, + 0.0058498257, + -0.010147197, + -0.0034367726, + -0.026459211, + 0.016390761, + -0.03788887, + -0.0139045855, + -0.022296835, + -0.00077411754, + -0.012914615, + -0.0267967, + -0.00887036, + -7.852793e-05, + -0.017043242, + -0.0043620574, + 0.006710425, + 0.028641647, + -0.039396327, + 0.058768246, + -0.021340614, + -0.0066935504, + -0.019698163, + -0.0059566973, + -0.036763903, + 0.008864735, + 0.030666586, + 0.03127407, + 0.0023666483, + -0.036583908, + 0.018471949, + 0.050488494, + 0.011823397, + -0.01122154, + 0.006209815, + 0.024884257, + 0.00022516907, + 0.0015862028, + 0.0072222846, + 0.0100797, + -0.01645826, + -0.03386149, + 0.004170813, + 0.006575429, + -0.022094341, + 0.016199516, + -0.011789649, + 0.037686378, + 0.03624642, + -0.024186779, + -0.0083753755, + 0.006963542, + 0.0076328972, + -0.0142195765, + -0.023196809, + 0.012318383, + -0.01274587, + 0.0021866537, + -0.022859318, + 0.00871849, + 0.043131214, + 0.03581893, + -0.01576078, + 0.0017155738, + -0.04394119, + -0.013218356, + 0.0008254441, + 0.015873278, + -0.054133385, + 
-0.010625308, + -0.018719442, + 0.027021695, + -0.007644147, + -0.029271627, + -0.006350436, + -0.0143770715, + 0.018066961, + 0.018044462, + 0.015243296, + 0.026054224, + 0.0201594, + -0.03932883, + 1.0519096e-05, + 0.007008541, + -0.024096781, + 0.04063379, + 0.03388399, + -0.0010173916, + 0.0067497985, + 0.02920413, + 0.0017563539, + 0.0028771018, + 0.047833573, + -0.0077510187, + -0.04398619, + 0.040048804, + 0.03084658, + -0.023028063, + 0.024276776, + 0.019315675, + -0.0031442812, + 0.039306328, + -0.019068182, + -0.012520877, + 0.005889199, + -0.03381649, + -0.010726555, + -0.0137470905, + 0.0073235314, + -0.02582923, + 0.008988482, + 0.0008395062, + 0.023624295, + -0.03719139, + -0.0055854586, + 0.012678372, + 0.0058160764, + 0.0038080115, + -0.05134347, + 0.00805476, + -0.006440433, + 0.048013568, + -0.0012951177, + -0.006603553, + 0.028889138, + -0.02384929, + -0.023084313, + 0.004300184, + 0.0045589264, + 0.025311746, + 0.026751703, + 0.05237844, + -0.010119073, + 0.00821788, + 0.0044014314, + 0.01745948, + 0.025244247, + -0.01641326, + -0.0054785865, + 0.016278265, + -0.0060523194, + 0.013533346, + -0.009629712, + -0.022015594, + 0.029991606, + 0.0013626156, + 0.00528453, + -0.017346982, + -0.004162376, + 0.033389006, + 0.046708606, + 0.014534567, + 0.014849558, + -0.0218356, + -0.05161346, + -0.01808946, + -0.007644147, + 0.0029474122, + -0.0005048287, + -0.037866373, + 0.04223124, + -0.021295615, + 0.034086484, + 0.020238146, + 0.008144757, + 0.0095790895, + -0.0068285465, + 0.013792089, + 0.00854412, + -0.020654384, + -0.011542156, + -0.027404184, + -0.0035998926, + -0.0028138224, + -0.012363382, + -0.0019419733, + -0.011868396, + 0.0013133984, + 0.021869348, + 0.015884526, + 0.008589119, + 0.0045111156, + -0.008600368, + -0.008549745, + -0.003838948, + -0.037438884, + 0.049498525, + 0.009140353, + -0.018888187, + 0.010619683, + -0.0005090473, + 0.010709681, + 0.0036955148, + -0.0059960713, + 0.01705449, + 0.024209278, + 0.020373143, + 0.033389006, + 
0.022341834, + 0.0049161036, + -0.039486323, + -0.0073741553, + 0.03122907, + -0.025649235, + -0.022465581, + 0.010484687, + -0.014107079, + -0.03633642, + 0.0038164486, + -0.030171601, + -0.013454599, + -0.021419361, + -0.01749323, + -0.0010033294, + 0.022015594, + 0.00872974, + -0.049453527, + -0.0120708905, + 0.010900925, + 0.0091347275, + -0.014242075, + -0.019180678, + 0.009854706, + -0.020193148, + 0.03224154, + -0.021565607, + -0.015333293, + -0.00019001386, + 0.009427219, + -0.028304156, + 0.027899168, + 0.011530906, + -0.013533346, + 0.0115196565, + 0.012475878, + -0.014343322, + -0.024681764, + 0.0036955148, + -0.006181691, + 0.01240838, + -0.004350808, + 0.039306328, + 0.035503943, + -0.048733547, + -0.032714024, + 0.022409331, + 0.023511799, + -0.02147561, + 0.004075191, + 0.01642451, + 0.014827058, + -0.0016621379, + -0.020496888, + -0.010962798, + -0.016795749, + 0.0040245675, + -0.023241807, + -0.046078626, + 0.005318279, + -0.005731704, + -0.0005807639, + 0.042861223, + -0.046483614, + -0.01574953, + 0.0026872635, + -0.07321282, + 0.018854437, + -0.011733401, + 0.012678372, + -0.027134191, + -0.028574148, + 0.01879819, + -0.0077678934, + 0.024411771, + 0.03892384, + -0.0048289187, + -0.038406353, + 0.010270944, + -0.00076216477, + 0.023376804, + 0.010878426, + 0.005939823, + 2.8893182e-06, + 0.040296298, + -0.0029614742, + 0.025581738, + 0.011677152, + 0.015659533, + -0.03055409, + 0.006575429, + -0.0010595778, + 0.045763634, + 0.02040689, + -0.00163542, + 0.019529417, + 0.039486323, + 0.008679116, + 0.013184607, + 0.0073629054, + -0.029676614, + 0.008639743, + -0.015007053, + 0.0049779764, + 0.022656824, + 0.0002733317, + -0.07910764, + -0.026684204, + -0.015119549, + -0.008926609, + 0.010293443, + -0.0061535668, + -0.017290734, + 0.006294187, + -0.008594744, + -0.021633105, + -0.02452427, + 0.0018533822, + 0.055933334, + -0.026571708, + 0.024704263, + -0.018831939, + 0.012937115, + -0.008797238, + 0.036988895, + -0.0060523194, + -0.0088197375, + 
0.00067603454, + -0.0076666465, + 0.030036604, + 0.01946192, + 0.028934138, + -0.04958852, + 0.0065191807, + -0.008409124, + 0.0068566706, + 0.013364602, + 0.021824349, + 0.026144221, + -0.0014849558, + 0.004812044, + -0.024884257, + 0.03125157, + -0.0184607, + -0.049768515, + -0.013105859, + 0.021621855, + -0.0010982485, + -0.03287152, + -0.005124222, + 0.011249664, + -0.030396594, + 0.008909734, + -0.017920716, + 0.01141841, + 0.011002172, + -0.01776322, + -0.006941043, + 0.024389273, + 0.007239159, + 0.0007818517, + 0.017178237, + 0.028776642, + -0.007925388, + -0.005939823, + -0.030306596, + 0.006305437, + 0.00031903345, + -0.01976566, + 0.0028292907, + -0.028596647, + -0.012644623, + -0.000331162, + -0.007340406, + -0.025896728, + 0.008532871, + 0.0028264783, + -0.02544674, + -0.043221213, + -0.01711074, + 0.022566827, + -0.0014399571, + 0.0070985383, + -0.002553674, + -0.018595695, + -0.008105383, + -0.022049343, + 0.02148686, + 0.005419526, + -0.0052957796, + -0.00528453, + 0.025356743, + -0.016390761, + -0.010062825, + 0.009477843, + 0.07339281, + 0.04124127, + 0.013949584, + -0.0059791966, + -0.002858821, + 0.023354303, + 0.0054364004, + -0.014275825, + -0.026324214, + 0.02483926, + 0.004145501, + -0.009050355, + -0.0067160497, + 0.006603553, + 0.04495366, + -0.0368314, + 0.049543522, + -0.02816916, + 0.00770602, + -0.0015749531, + 0.011424035, + 0.033636495, + -0.0073010325, + 0.012363382, + -0.016480759, + -0.03253403, + 0.015063301, + -0.013960834, + -0.010642183, + 0.0402738, + 0.01473706, + -0.0075147757, + -0.03055409, + 0.008954733, + -0.026369214, + 0.035323948, + -0.02416428, + 0.010479063, + -0.006462932, + 0.02652671, + -0.005427963, + -0.05296342, + 0.005076411, + 0.003574581, + -0.023466801, + 0.0015355792, + 0.009005357, + -0.05908324, + 0.016863247, + 0.011092169, + 0.033456504, + -0.009865955, + 0.03482896, + 0.014838307, + 0.018066961, + 0.027764171, + 0.014287074, + -0.005346403, + -0.014557066, + 0.00974221, + -0.018550698, + 
0.006108568, + -0.025604237, + -0.01813446, + -0.017819468, + -0.0092191, + 0.00033872036, + -0.0065360554, + 0.0028911638, + -0.011857146, + 0.029766612, + -0.0011474658, + -0.0038895716, + 0.02447927, + -0.013510847, + 0.014320823, + 0.010495937, + -0.012183387, + -0.00034030236, + 0.0040976903, + -0.018246956, + 0.0369664, + 0.008628493, + -0.0005628348, + 0.0013724591, + -0.012487127, + -0.036156423, + 0.015164548, + 0.0100572, + 0.05233344, + 0.027314186, + -0.007537275, + -0.034693964, + 0.019338174, + -0.0035914555, + -0.009528466, + -0.031116573, + -0.012610874, + 0.029316626, + 0.009674711, + 0.00028387827, + 0.00019106852, + -0.0056051454, + 0.025964227, + 0.0061198175, + 0.0039120708, + 0.037483882, + 0.019315675, + 0.023005564, + -0.0032792771, + 0.013004612, + -0.015547036, + -0.015367042, + 0.002796948, + -0.030171601, + 0.025221748, + 0.02044064, + -0.0059566973, + 0.029361624, + 0.014984554, + 0.05534835, + 0.0011341068, + 0.004277685, + -0.013263355, + 0.013522097, + -0.003810824, + 0.0016579194, + -0.052918423, + 0.022578077, + -0.0038670723, + 0.0023033689, + -0.021025622, + 0.042793725, + -0.0021121246, + 0.024051784, + -0.03757388, + -0.0035858306, + -0.023376804, + -0.0057401415, + -0.008352876, + 0.014320823, + -0.01274587, + -0.03390649, + 0.06637302, + -0.034153983, + -0.0013450381, + 0.014714561, + -0.0056670187, + 0.0035380195, + -0.015580785, + 0.03660641, + 0.044931162, + 0.019191928, + 0.02787667, + 0.0007291189, + 0.031611558, + -0.009607214, + 0.037416384, + 0.005079224, + 0.0064291833, + -0.020631885, + 0.006963542, + 0.02044064, + 0.023376804, + -0.013049611, + -0.030576589, + 0.022105591, + -0.023916787, + 0.021711852, + 0.0468886, + 0.006249189, + 0.006766673, + -0.017346982, + -0.0070310403, + -0.017470729, + -0.01576078, + -0.049408525, + -0.015007053, + 0.035728935, + 0.0071604117, + 0.026616706, + 0.0042945594, + 0.027606677, + 0.016998243, + 0.04967852, + 0.0035183325, + 0.01577203, + -0.016345764, + 0.01644701, + 
-0.039733816, + -0.016357012, + 0.00078044547 + ], + "Category": "Boutique", + "Tags": [ + "concierge", + "view", + "air conditioning" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "48", + "HotelName": "Nordick's Valley Motel", + "Description": "Only 90 miles (about 2 hours) from the nation's capital and nearby most everything the historic valley has to offer. Hiking? Wine Tasting? Exploring the caverns? It's all nearby and we have specially priced packages to help make our B&B your home base for fun while visiting the valley.", + "DescriptionVector": [ + -0.06868838, + -0.01605626, + 0.03267631, + 0.005335447, + -0.03286424, + -0.012896689, + 0.04641868, + 0.04179091, + -0.011739746, + 0.010717876, + -0.014094742, + 0.017618427, + -0.043952104, + 0.013507461, + 0.038666576, + 0.03366294, + 0.007687507, + 0.040592857, + 0.04291849, + 0.023244578, + -0.026427642, + -0.01954471, + -0.04961349, + 0.0071707, + 0.05722465, + -0.01691369, + -0.011446105, + 0.015292794, + 0.002882081, + -0.04900272, + -0.030256713, + -0.036199994, + -0.025370535, + -0.058822054, + -0.008997143, + 0.04468033, + -0.0004114637, + -0.014646786, + 0.011610543, + 0.009643152, + -0.0017794612, + 0.025605448, + 0.0058405087, + -0.025957815, + -0.0032182995, + -0.010265671, + 0.012767487, + 0.04561998, + -0.026474623, + -0.0195682, + -0.03422673, + -0.05398286, + -0.047146913, + -0.05966774, + -0.03704568, + 0.026944447, + -0.030890975, + -0.02050785, + 0.024430886, + 0.013671899, + 0.029035168, + -0.022962684, + 0.017418751, + 0.05619104, + 0.005655515, + 0.047945615, + -0.0013507461, + 0.018792989, + -0.0045925365, + -0.00863303, + 0.013190329, + 0.03953575, + -0.018699024, + 0.0048949863, + 0.035424784, + 0.030632572, + -0.0168902, + -0.040029068, + -0.008950161, + -0.038267225, + -0.0026530414, + -0.019098375, + 0.03345152, + -0.0034678937, + -0.017219076, + -0.0399351, + 0.009443477, + 0.036223486, + -0.019603437, + -0.0021655983, + 0.026357166, + 0.0006882198, + 0.020601815, 
+ -0.05356002, + -0.030444643, + 0.045925368, + 0.03305217, + -0.0019248131, + -0.034320697, + 0.05703672, + 0.027014922, + -0.058963004, + 0.002802798, + 0.0004624837, + 0.046230752, + 0.023044903, + 0.048157036, + 0.06469487, + 0.015163593, + 0.008327643, + -0.078225814, + -0.009519824, + -0.002112743, + 0.03042115, + -0.022116998, + 0.04028747, + 0.0050829165, + -0.03267631, + 0.02621622, + 0.01841713, + -0.01273225, + -0.010988026, + -0.042025823, + -0.032981697, + -0.007769726, + -0.029434519, + -0.05153977, + 0.02452485, + -0.03596508, + -0.02605178, + 0.008680012, + -0.03037417, + 0.09894509, + 0.024407394, + -0.0043135784, + -0.017453989, + -0.0019982234, + -0.0076757614, + -0.030045291, + 0.014764242, + -0.00015939171, + -0.011493088, + 0.013084618, + -0.009308402, + 0.038666576, + 0.053841915, + 0.004842131, + 0.011363885, + 0.0076816343, + 0.025887342, + -0.00080897944, + 0.020190718, + -0.009960284, + -0.031031923, + -0.055768196, + -0.026098764, + -0.021623682, + 0.023949316, + -0.012039259, + 0.05050616, + -0.032253467, + 0.007147209, + 0.021964306, + -0.036505383, + 0.023902332, + 0.012238934, + -0.011093737, + 0.014364891, + -0.042260733, + -0.018511094, + 0.012556066, + -0.035354313, + 0.011328649, + 0.011880693, + 0.0025532038, + 0.013436987, + 0.011011517, + 0.03396833, + -0.0054910765, + -0.0065716733, + 0.013566189, + -0.015175339, + 0.023174105, + -0.014987409, + -0.014494093, + 0.00067757536, + 0.003629396, + 0.039394803, + -0.026920957, + 0.016878454, + -0.044962227, + -0.021130366, + -0.024924202, + 9.87366e-05, + 0.0007553901, + -0.03453212, + -0.009901556, + -0.010988026, + 0.0175597, + 0.028706292, + -0.054781564, + -0.03248838, + 0.07333964, + -0.04545554, + -0.043364823, + -0.027531728, + 0.018099997, + 0.010805969, + -0.00055718276, + -0.019450745, + -0.0034355933, + 0.029575467, + 0.025605448, + 0.036129523, + 0.012180206, + -0.030115765, + -0.044750806, + 0.042707067, + 0.021012912, + 0.007963529, + -0.04616028, + 0.01538676, + 
0.022739517, + 0.053043213, + 0.047828157, + 0.034320697, + -0.03934782, + -0.018569821, + 0.015809601, + -0.0087269945, + 0.021435753, + -0.03495496, + -0.022669043, + -0.008151459, + -0.032605834, + 0.024642307, + 0.018992664, + -0.0010541693, + 0.01235639, + -0.010083613, + -0.055016477, + -0.029011676, + 0.0305621, + 0.033733416, + 0.0067830947, + -0.01672576, + 0.005720116, + -0.021752885, + 0.053137176, + 0.018511094, + -0.048039578, + -0.015398505, + -0.01908663, + -0.0436937, + -0.014999154, + 0.023385527, + -0.02093069, + 0.0044457163, + 0.026920957, + -0.01624419, + 0.026451131, + 0.033991817, + -0.02562894, + -0.018605059, + -0.010835333, + -0.021987798, + 0.025159115, + -0.0039670826, + -0.025417518, + -0.054640617, + -0.034649573, + -0.029998308, + 0.006178195, + -0.010571056, + -0.0006595899, + -0.044774298, + -0.0032036174, + -0.020472612, + 0.04312991, + -0.007746235, + -0.010300907, + 0.0023359098, + 0.0032388542, + 0.015398505, + -0.030233221, + -0.021905579, + 0.04103919, + 0.038431663, + -0.019438999, + -0.006548182, + -0.04564347, + 0.03702219, + -0.009831082, + -0.009883937, + 0.028588835, + -0.01986184, + 0.028729782, + -0.031501748, + 0.020519596, + 0.021153858, + -0.0059051095, + 0.04157949, + 0.017994286, + 0.0043429425, + 0.03171317, + 0.014000777, + -0.021071639, + -0.008186696, + 0.0010460941, + 0.021999544, + 0.019215832, + 0.023068395, + -0.0129084345, + 0.005746544, + 0.018746007, + -0.038807523, + -0.06173497, + -0.020590069, + 0.063520305, + -0.025417518, + 0.036975205, + 0.02562894, + 0.008433354, + -0.0055997237, + -0.046089806, + -0.013166838, + 0.052385457, + -0.021494482, + -0.0025047532, + -0.009284911, + 0.027085396, + -0.021529717, + -0.02833043, + 0.05600311, + -0.01900441, + -0.015046136, + -0.038995452, + -0.011440232, + 0.03840817, + 0.059385847, + -0.05168072, + -0.010148214, + -0.036223486, + 0.009478714, + -0.026192728, + 0.039629716, + 0.008398117, + 0.029223097, + 0.0071237176, + -0.0039230366, + -0.07084957, + 
0.001154741, + 0.0012428332, + 0.020672288, + -0.0023917016, + -0.007998766, + 0.053700965, + -0.044586368, + -0.033334065, + 0.0024504296, + -0.029411027, + -0.012180206, + 0.02983387, + -0.017512716, + 0.012720505, + 0.03706917, + 0.019239323, + 0.004977206, + -0.0034091657, + -0.01281447, + 0.03020973, + -0.0032417907, + 0.01262654, + -0.0025091576, + 0.018581567, + 0.053653985, + 0.06577546, + 0.011804346, + -0.0054176664, + -0.01758319, + -0.016385138, + -0.031478256, + 0.018276181, + 0.0033298829, + 0.047640227, + -0.05698974, + 0.011857201, + -0.046653595, + -0.061828934, + 0.006430726, + 0.039700188, + -0.0132373115, + -0.06333237, + -0.008685885, + -0.024242956, + -0.032605834, + 0.029199608, + -0.015363269, + -0.0053824293, + 0.045126665, + 0.027531728, + -0.03514289, + -0.058070336, + -0.014905189, + 0.012591302, + 0.0170194, + -0.012509083, + -0.03363945, + -0.02605178, + 0.02508864, + -0.014881698, + -0.055768196, + 0.052291494, + -0.040733803, + 0.01034789, + 0.03685775, + -0.015797857, + -0.037585977, + 0.03587112, + -0.022175727, + 0.0123328995, + 0.059714723, + 0.015504216, + -0.00021142112, + -0.06572848, + 0.0016796234, + 0.02945801, + -0.013319531, + -0.08395768, + -0.026944447, + -0.0009792909, + -0.02230493, + 0.036740292, + 0.062862545, + 0.00403462, + 0.010048376, + -0.06474185, + -0.014447111, + 0.014752496, + -0.007664016, + 0.0070003886, + -0.029129133, + -0.05191563, + 0.012485592, + 0.031854115, + 0.009696008, + 0.03897196, + -0.0019321542, + -0.01747748, + -0.030820502, + 0.019204086, + -0.027813625, + -0.03345152, + -0.04620726, + -0.00024555682, + 0.03932433, + 0.00018701227, + -0.01793556, + 0.019626928, + -0.018053016, + 0.01726606, + 0.008797468, + -0.046653595, + 0.013472224, + -0.00175597, + -0.039817646, + 0.0058170175, + 0.011810219, + 0.025605448, + -0.049378578, + 0.005203309, + -0.028212976, + -0.01586833, + -0.008386372, + -0.039230365, + -0.015997533, + -0.05891602, + 0.040216997, + -0.033733416, + -0.01720733, + 
0.002265436, + -0.004586664, + -0.0042284224, + 0.039606225, + -0.00702388, + -0.104395054, + -0.02385535, + -0.010835333, + -0.06704399, + -0.028424395, + 0.039206874, + 0.014916935, + -0.019626928, + -0.01064153, + 0.010300907, + 0.027296817, + -0.05088202, + 0.044327963, + 0.043482278, + -0.014458856, + -0.0065775462, + -0.038619593, + 0.017042892, + 0.00021637631, + 0.007687507, + -0.0063308883, + 0.014517584, + 0.013260803, + 0.0008574301, + 0.021048147, + -0.013272548, + -0.015457233, + -0.0043634973, + -0.006048993, + -0.0074643404, + 0.013354768, + 0.0074291034, + 0.027461255, + 0.004008192, + 0.046818033, + 0.021153858, + 0.013225566, + 0.010506456, + 0.0044310344, + -0.019955805, + 0.03481401, + 0.009543315, + -0.039512258, + 0.031595714, + 0.0008449504, + -0.018652042, + -0.019603437, + -0.033005185, + -0.00601082, + -0.010130595, + 0.049331598, + 0.0071824454, + -0.018769497, + 0.013906812, + 0.01804127, + 0.002895295, + 0.00038063145, + -0.011628162, + 0.01538676, + -0.00027749024, + -0.027226344, + 0.00060526637, + 0.020601815, + 0.012708759, + 0.012931925, + -0.003629396, + 0.051962614, + -0.02945801, + -0.024172483, + 0.017512716, + 0.0037292338, + -0.0057582892, + 0.014541076, + -0.022845227, + 0.011974658, + 0.010342017, + 0.0029716415, + 0.024830237, + 0.008450972, + 0.057271633, + 0.02184685, + 0.034297206, + 0.037867874, + -0.014353146, + -0.017042892, + 0.024618816, + 0.037092663, + -0.0054763947, + -0.00027969253, + 0.05417079, + 0.0008383435, + 0.032840747, + -0.041438542, + -0.017324787, + 0.00765227, + 0.006307397, + -0.00034245817, + -0.018945683, + -0.008544937, + -0.0019747321, + -0.061687987, + 0.026944447, + -0.041250613, + -0.0024871347, + -0.0019497726, + -0.025347045, + 0.025981307, + 0.0021465118, + 0.0050858525, + -0.013601426, + -0.0066245287, + -0.0010049845, + -0.04009954, + -0.039042436, + -0.017148603, + 0.013577934, + -0.01798254, + 0.0047011836, + 0.016326409, + -0.021705903, + -0.007164827, + -0.0215767, + 0.018546332, + 
0.00631327, + 0.0050418065, + 0.0031889353, + -0.011181829, + 0.028541852, + -0.01795905, + -0.0026222093, + 0.02098942, + -0.019826604, + -0.009836955, + -0.022763008, + -0.016878454, + 0.04580791, + -0.010676767, + -0.0058757453, + 0.015844839, + 0.0023461871, + 0.02640415, + -0.01793556, + -0.04291849, + -0.008186696, + -0.009226183, + -0.0026780008, + 0.05722465, + 0.049472544, + -0.040216997, + -0.028283449, + -0.01871077, + -0.009754736, + -0.002808671, + -0.018428875, + -0.012943671, + 0.014411873, + 0.018781243, + -0.0064718355, + 0.008544937, + -0.0050917254, + 0.018804735, + 0.015927058, + -0.003905418, + -0.01833491, + -0.03932433, + 0.019615183, + 0.028447887, + 0.01292018, + 0.01795905, + 0.012051004, + 0.027860606, + -0.008169077, + -0.020519596, + -0.033287082, + -0.0003586084, + 0.0010805968, + 0.017677156, + -0.015363269, + 0.03114938, + 0.00601082, + -0.0024093199, + 0.006800713, + -0.0116810175, + -0.02335029, + 0.044562876, + 0.05929188, + 0.024665799, + 0.020178972, + 0.0012443014, + -0.000754656, + 0.009185073, + 0.04446891, + 9.937894e-05, + 0.020860218, + 0.0059491554, + 0.050929, + -0.0029877916, + -0.0075583053, + 0.039841138, + -0.020061515, + -0.02098942, + 0.017806357, + -0.0017089874, + 0.0015445488, + -0.004636583, + -0.011181829, + -0.024289938, + 0.0037938347, + 0.011980531, + 0.033287082, + -0.02887073, + -0.023127122, + -0.022563333, + 0.019450745, + -0.091474876, + 0.00072272256, + -0.028706292, + 0.02042563, + -0.023608692, + 0.0017941432, + -0.029223097, + -0.023902332, + -0.017066384, + 0.014576312, + -0.017042892, + -0.02393757, + -0.019027902, + -0.034673065, + 0.0037321702, + -0.017089874, + -0.034696557, + -0.021459244, + -0.004848004, + 0.018652042, + -0.050036334, + -0.025347045, + 0.027813625, + -0.0012068623, + 0.0019262814, + -0.00055057585, + 0.012156715, + 0.015985787, + -0.04942556, + -0.00038283374, + 0.07733315, + 0.025159115, + -0.031501748, + 0.01139325, + 0.045103174, + 0.011316903, + 0.011669272, + 
-0.003861372, + -0.0082336785, + -0.057459563, + -0.025206096, + 0.015551198, + 0.015410251, + 0.003723361, + -0.008245424, + 0.016467357, + -0.00859192, + 0.02927008, + -0.025558464, + -0.07315171, + 0.021553209, + -0.013601426, + 0.008022257, + -0.013824592, + -0.03666982, + 0.014317908, + -0.026920957, + 0.004298896, + 0.013319531, + -0.0027969254, + -0.004122712, + -0.007875437, + -0.013178583, + -8.2448736e-05, + 0.03516638, + -0.0038525627, + 0.050083317, + 0.008409862, + 0.025981307, + -0.018323164, + -0.030632572, + 0.0065658004, + 0.035048924, + -0.03363945, + -0.029669432, + 0.012884943, + -0.0032770275, + 0.016984165, + -0.050224263, + 0.017007655, + -0.019579945, + 0.046771053, + -0.004798085, + -0.0046835653, + -0.00010837168, + -0.0041843764, + 0.019016156, + 0.00075612415, + -0.015797857, + 0.016114987, + 0.0018337846, + -0.019180594, + -0.0313608, + 0.02544101, + -0.056378968, + 0.009696008, + -0.045901876, + -0.005582105, + -0.05647293, + 0.028894221, + 0.025605448, + 0.042307716, + -0.025041658, + 0.028259957, + -0.029622449, + 0.039441787, + -0.0056525785, + -0.033287082, + -0.02640415, + -0.004921414, + -0.037398048, + -0.012191951, + -0.0125795575, + -0.0019468362, + -0.030350678, + 0.032441396, + 0.021811614, + -0.021705903, + 0.012426864, + 0.037867874, + -0.011510706, + -0.026897466, + 0.013507461, + -0.03589461, + 0.0037145517, + -0.019333288, + -0.016526084, + -0.0012949544, + 0.0021024656, + -0.014576312, + -0.05910395, + -0.03154873, + 0.003077352, + 0.022234455, + 0.0038231986, + -0.014364891, + -0.0034062292, + 0.018111743, + 0.0209072, + 0.014846462, + 0.03683426, + 0.035283837, + 0.022023033, + 0.005869873, + -0.023679167, + 0.0011143655, + -0.032464888, + 0.023808368, + 0.0077403625, + -0.006882932, + -0.008803341, + 0.0039142272, + 0.010753114, + -0.009143963, + 0.026075272, + -0.022187473, + -0.023925824, + 0.00034355934, + -0.016279427, + 0.028823746, + -0.011769109, + -0.007628779, + -0.034461644, + 0.0023872969, + 0.043035947, 
+ 0.007922419, + 0.016173717, + 0.013472224, + 0.00015535415, + -0.026756518, + 0.029434519, + -0.07380947, + -0.021917323, + 0.034508627, + 0.024853729, + -0.009766482, + -0.037679944, + 0.009613789, + -0.041180138, + 0.016114987, + -0.033615958, + 0.0134839695, + 0.017771121, + -0.0013903875, + 0.02034341, + 0.0035970956, + 0.016279427, + 0.003033306, + 0.045878384, + 0.024642307, + 0.021071639, + -1.6804033e-05, + -0.00015691412, + 0.018428875, + 0.014975663, + -0.017242568, + 0.01425918, + 0.012931925, + 0.040804278, + -0.024289938, + 0.032206483, + -0.00034245817, + -0.006172322, + -0.015057882, + 0.018276181, + 0.001198053, + -0.018781243, + -0.012532575, + 0.02109513, + -0.010083613, + -0.051304862, + -0.010130595, + -0.055909142, + 0.0001237878, + 0.029974818, + 0.009772355, + 0.009519824, + -0.021635428, + 0.03627047, + -0.021377025, + -0.02385535, + -0.01881648, + 0.018064762, + -0.022363657, + 0.039982084, + 0.009108727, + -0.005050616, + -0.016044514, + -0.020543085, + 0.020719271, + -0.0029305317, + -0.0012773359, + -0.008820959, + -0.005567423, + -0.009725372, + 0.04052238, + -0.0064542172, + 0.02736729, + 0.01191593, + -0.0049390323, + 0.01262654, + -0.025511483, + -0.04031096, + 0.03589461, + -0.05017728, + -0.008944288, + 0.013519206, + -0.006912296, + -0.010712003, + 0.021705903, + -0.017219076, + 0.019274559, + -0.0003224539, + 0.0093025295, + -0.00031052477, + -0.025910834, + 0.047240876, + 0.020648796, + 0.0073586297, + -0.014129979, + -0.015645163, + 0.0037850256, + -0.019709148, + 0.014376637, + -0.009167455, + 3.854031e-05, + 0.0013030295, + 0.038478646, + 0.008832705, + 0.021153858, + 0.023843605, + 0.010571056, + 0.009977902, + 0.002569354, + -0.004980142, + -0.00884445, + 0.005438221, + 0.039817646, + 0.023456, + -0.023714403, + -0.048204016, + 0.028823746, + 0.06056041, + -0.007658143, + 0.03227696, + -0.0064718355, + 0.018792989, + -0.036340944, + 0.019204086, + 0.03495496, + -0.013707137, + 0.008433354, + -0.016326409, + 0.012191951, + 
-0.014482347, + 0.013648408, + 0.009143963, + 0.026873974, + 0.047381822, + -0.007294029, + -0.0032946458, + -0.008909051, + -0.03441466, + -0.0071119717, + 0.0075876694, + 0.0033063914, + 0.027108887, + -0.009014762, + 0.032958206, + 0.0031155252, + 0.0041990583, + 0.0170194, + -0.04294198, + -0.017759375, + 0.015856585, + -0.040545873, + -0.00418144, + -0.030890975, + 0.0044545257, + -0.006788967, + 0.024266448, + 0.034461644, + -0.0123328995, + -0.013765864, + 0.016584814, + 0.007006261, + -0.029763397, + 0.014705514, + -0.009619662, + -0.0009257015, + 0.002184685, + -0.028400905, + 0.0006540841, + -6.4119145e-06, + 0.01954471, + 0.016984165, + -0.001952709, + -0.0066186558, + -0.0114343595, + -0.006031375, + 0.024172483, + -0.014846462, + -0.006066612, + -0.0007447456, + -0.014881698, + -0.014188707, + 0.016561322, + 0.013671899, + 0.035072416, + 0.023150614, + 0.05074107, + 0.006583419, + -0.018933937, + -0.013131601, + -0.0046630106, + 0.029904343, + -0.020460866, + -0.026920957, + -0.009437604, + -0.0032975823, + -0.005552741, + 0.023174105, + 0.022856973, + -0.01908663, + -0.037891366, + -0.04446891, + 0.01624419, + 0.026756518, + 0.010864696, + 0.010060122, + 0.038267225, + 0.01463504, + -0.005147517, + 0.024102008, + -0.0046571377, + -0.023596946, + -0.04031096, + 0.005623215, + -0.0007454797, + 0.018076506, + 0.03800882, + 0.0069768974, + -0.032417905, + 0.019873586, + 0.008462718, + -0.02833043, + -0.020977674, + 0.021929068, + 0.0106943855, + -0.010946916, + 0.03232394, + -0.00038246668, + -0.0058170175, + -0.029223097, + 0.017031146, + 0.003156635, + 0.013871575, + 0.0034796393, + 0.056238018, + 0.013895066, + 0.01938027, + -0.0010071867, + 0.0076816343, + 0.038854506, + 0.0026970876, + -0.0089736525, + -0.0063308883, + -0.034179747, + 0.029551975, + 0.0124151185, + -0.006189941, + 0.032746784, + -0.028447887, + -0.0071237176, + 0.025534974, + 0.010295034, + 0.016502593, + 0.0015122483, + -0.021083385, + -0.020777998, + 0.015245812, + -0.0042695324, + 
-0.017818103, + -0.026122255, + 0.0013551507, + -0.011117227, + -0.008785723, + -0.017794611, + -0.003970019, + -0.023326797, + 0.03596508, + 0.01881648, + -0.018652042, + -0.021647174, + -0.009989648, + 0.0035090034, + -0.020120244, + 0.0064659626, + 0.010841206, + 0.020531341, + 0.012403373, + 0.012697013, + 0.006060739, + 0.0061312127, + -0.017418751, + 0.015445488, + -0.015234067, + 0.007928292, + 0.059996616, + -0.01662005, + -0.020860218, + -0.0059609013, + -0.009907429, + 0.032253467, + -0.031266835, + -0.019720893, + -0.012027513, + 0.01131103, + 0.010606294, + 0.0011503365, + 0.0019996916, + 0.0064953268, + 0.0023770195, + -0.02452485, + -0.015856585, + 0.0031830624, + -0.026498115, + 0.008621284, + 0.00873874, + -0.034438152, + -0.013425241, + -0.010770732, + -0.03455561, + 0.0038349442, + 0.0019130675, + -0.01774763, + 0.017994286, + -0.01766541, + -0.0036528872, + -0.018640297, + -0.014024268, + 0.011052627, + 0.023620438, + -0.0065658004, + 0.03436768, + -0.017677156, + 0.024031535, + 0.0057318616, + 0.042589612, + -0.0013845147, + 0.004601346, + -0.019756129, + -0.012767487, + 0.029387537, + -0.009566806, + -0.031619202, + 0.0058111446, + 0.029857362, + -0.03152524, + 0.024736272, + 0.015187085, + 0.003946528, + -0.019885331, + -0.032934714, + 0.03230045, + -0.057976373, + 0.027273325, + 0.0088092135, + 0.0033504376, + -0.011868947, + 0.003925973, + 0.013319531, + 0.013096364, + -0.05525139, + -0.0029143814, + 0.014623295, + -0.026756518, + 0.041156646, + -0.031948082, + -0.029340554, + 0.016361646, + -0.033498503, + 0.0113521395, + -0.007822582, + -0.007945911, + 0.013918557, + 0.017970797, + -0.05074107, + -0.0030303695, + 0.017806357, + -0.012685267, + -0.00015113308, + 0.033357557, + -0.016772743, + 0.035448276, + 0.014423619, + 0.008298279, + -0.00877985, + -0.0029877916, + 0.028776765, + -0.025487991, + 0.005802335, + -0.01385983, + 0.0049595875, + -0.00030850602, + 0.027249834, + 0.020073261, + -0.03363945, + 0.023021411, + 0.0035236855, + 
0.004848004, + 0.024008043, + -0.0137306275, + -0.013319531, + 0.013178583, + -0.026850482, + -0.03342803, + 0.06253367, + 0.012708759, + 0.022257946, + -0.009919175, + -0.02385535, + -0.011299285, + 0.00863303, + 0.04108617, + 0.010747241, + -0.030350678, + 0.02320934, + -0.030256713, + -0.026380658, + 0.029857362, + -0.052291494, + 0.006800713, + -0.01262654, + 0.020038025, + 0.0055409954, + -0.0031037796, + -0.0046718195, + 0.02544101, + 0.017336532, + -0.0014124106, + 0.040193506, + -0.016655287, + 0.0017383515, + -0.0115987975, + 0.033615958, + 0.033545487, + 0.001444711, + -0.016232444, + 0.007476086, + -0.011117227, + -0.021893833, + 0.0013639599, + 0.0005604862, + -0.044140033, + 0.034297206, + 0.013307786, + -0.040945224, + -0.013942049, + 0.0675608, + -0.0014549885, + -0.023690911, + -0.0040463656, + 0.022046525, + 0.017125111, + 0.02621622, + 0.013636663, + -0.0060049472, + -0.00974299, + 0.0040170015, + -0.0038202624, + -0.008333516, + 0.0021817486, + 0.002009969, + 0.028048536, + -0.0037116155, + -0.014541076, + -0.017160349, + -0.0075876694, + -0.012344644, + 0.003934782, + 0.015786111, + 0.010265671, + 0.017794611, + -0.0012545788, + -0.023784876, + -0.012180206, + -0.044703823, + -0.018064762, + 0.006336761, + -0.0059051095, + 0.014834716, + 0.01929805, + -0.024430886, + -0.02659208, + -0.008903178, + -0.014682023, + -0.010600421, + -0.01814698, + -0.028964695, + -0.020296428, + 0.03356898, + -0.012368136, + 0.012967163, + 0.008257169, + 0.009977902, + -0.004290087, + 0.0007781472, + -0.018734261, + -0.03232394, + 0.0042196135, + -0.018076506, + -0.0036910605, + 0.026615571, + -0.019168848, + 0.026169237, + -0.022070017, + -0.0014417747, + -0.018182216, + -0.010835333, + 0.0006588558, + -0.04087475, + 0.015128356, + -0.007564178, + -0.015750874, + -0.01154007, + 0.03020973, + 0.022210963, + -0.033169627, + -0.035800643, + -0.018311419, + -0.011792601, + -0.00048744315, + -0.011545943, + -0.019697402, + 0.017430497, + -0.031948082, + 0.0047011836, + 
-0.023115376, + 0.046865016, + 0.012051004, + -0.027132379, + 0.007258792, + -0.009096981, + -0.0041961223, + -0.01358968, + 0.038337696, + 0.0055703595, + 0.011551815, + -0.0155277075, + -0.002061356, + -0.013096364, + -0.019063137, + 0.0064953268, + 0.00054543716, + -0.0041050934, + -0.007752108, + -0.011639908, + -0.03683426, + -0.022833481, + 0.004833322, + -0.014024268, + -0.010259798, + -0.0034972578, + -0.010623911, + -0.015163593, + -0.008086858, + -0.0027749024, + -0.032182995, + -0.0281425, + -0.037280593, + 0.0134017505, + 0.009378877, + 0.0058405087, + -0.016279427, + 0.0115987975, + -0.013566189, + 0.029387537, + -0.003150762, + 0.017794611, + -0.0028629943, + -0.036223486, + 0.0056525785, + -0.016737506, + -0.0064953268, + 0.021330042, + 0.03114938, + -0.021083385, + -0.021236077, + 0.026169237, + 0.015328032, + -0.06690304, + -0.014764242, + 0.014129979, + 0.024665799, + 0.0017868022, + -0.022586824, + -0.011692763, + -0.028847238, + 0.0038554992, + 0.00320949, + 0.007998766, + -0.03702219, + 0.06347332, + 8.6211e-05, + 0.032394417, + 0.007570051, + 0.020672288, + 0.013260803, + 0.040545873, + -0.029411027, + 0.0004745964, + 0.018381892, + 0.028025044, + -0.018734261, + 0.041555997, + -0.018558078, + -0.026639063, + -2.9960502e-05, + 0.017547954, + 0.022539841, + 0.032723293, + 0.014423619, + -0.035401292, + 0.024877219, + 0.05492251, + 0.0014674682, + 0.023385527, + -0.021517973, + 0.0019218768, + 0.03439117, + -0.012344644, + -0.06023153, + -0.030092273, + 0.002497412, + 0.03098494, + 0.0069357874, + -0.008644775, + 0.004157949, + 0.008380499, + 0.012556066, + -0.019932315, + -0.009919175, + 0.008562556, + -0.008644775, + 0.019016156, + 0.019027902, + -0.0020892518, + 0.048251, + -0.027085396, + -0.0049243504, + -0.018640297, + 0.034673065, + -0.00044266297, + -0.000776679, + 0.019603437, + 0.052385457, + 0.007282283, + 0.0024636434, + 0.0013558848, + -0.002149448, + -0.012591302, + 0.05567423, + 0.032629326, + 0.0458314, + -0.047146913, + 
-0.007387994, + -0.00061003806, + -0.0052649733, + 0.004125648, + -0.01758319, + -0.014588058, + -0.021635428, + 0.03462608, + -0.0037174881, + -0.010946916, + 0.012121478, + 0.020178972, + 0.017806357, + -0.016009277, + -0.0064953268, + -0.035824135, + 0.0057083704, + -0.035095908, + -0.009789973, + 0.02088371, + -0.048297983, + -0.005482267, + 0.007722744, + 0.02774315, + 0.007176573, + 0.0062897787, + 0.0120862415, + 0.018182216, + -0.04200233, + -0.00013342289, + 0.0023241641 + ], + "Category": "Boutique", + "Tags": [ + "continental breakfast", + "air conditioning", + "free wifi" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "49", + "HotelName": "Swirling Currents Hotel", + "Description": "Spacious rooms, glamorous suites and residences, rooftop pool, walking access to shopping, dining, entertainment and the city center. Each room comes equipped with a microwave, a coffee maker and a minifridge. In-room entertainment includes complimentary W-Fi and flat-screen TVs.", + "DescriptionVector": [ + -0.042438243, + -0.016445654, + 0.031978894, + 0.014899005, + -0.034515843, + -0.018871332, + -0.0060530687, + 0.004514766, + 0.021452786, + -0.004854138, + -0.00635906, + 0.01942768, + 0.010164482, + -0.024857638, + -0.02872982, + 0.03224594, + -0.08630073, + 0.040235102, + 0.04464138, + 0.059195448, + -0.004511984, + 0.007293725, + -0.050516415, + -0.025814557, + -0.03353667, + 0.04793496, + -0.058171768, + 0.013619404, + 0.002342226, + 0.026081603, + -0.0065204008, + -0.02081855, + 0.055545803, + 0.0077165496, + 0.027906425, + -0.029642232, + 0.013552642, + -0.028173473, + 0.029976042, + 0.010615123, + 0.007377177, + 0.038388025, + -0.029152647, + 0.002190621, + 0.009535808, + -0.031244515, + -0.010186736, + 0.022331817, + 0.01183909, + 0.051139526, + -0.021920118, + 0.032824542, + -0.03467162, + -0.06738489, + -0.015433099, + 0.008617834, + -0.024011988, + -0.06168789, + 0.0066094166, + -0.012028248, + -0.016412271, + -0.004717833, + -0.00071351655, + 
0.065782614, + -0.020150932, + -0.0036718983, + -0.013730674, + 0.05416606, + -0.016935239, + 0.007199146, + 0.01865992, + -0.008901571, + 0.00586391, + -0.019283028, + 0.014309276, + -0.004884737, + -0.02939744, + -0.012161772, + -0.015088163, + -0.017725253, + -0.033959493, + -0.026014842, + 0.013541515, + 0.064892456, + 0.018615412, + -0.0016440089, + -0.029019123, + -0.024857638, + -0.010370331, + 0.08242855, + 0.00936334, + 0.014008848, + -0.02939744, + -0.0045481464, + -0.060797732, + -0.013942086, + 0.050516415, + -0.009285452, + -0.034938667, + 0.102635115, + 0.054077044, + -0.07601942, + -0.01334123, + -0.007199146, + 0.041548084, + -0.04043539, + 0.044218555, + -0.016768334, + 0.054121554, + 0.008050359, + -0.100409724, + -0.01759173, + -0.039233677, + -0.022854785, + -0.01169444, + 0.006971043, + 0.026704714, + -0.0396565, + -0.021797722, + 0.011388448, + 0.026326397, + 0.03745336, + 0.0032212562, + -0.044730395, + 0.00016334036, + -0.069877334, + -0.015344083, + -0.015900431, + -0.03834352, + 0.02821798, + -0.032156926, + -0.006820829, + 0.003908346, + -0.0040696873, + 0.011271615, + 0.067028835, + 0.0042115557, + 0.0042477185, + -0.002353353, + -0.051451083, + 0.006670615, + 0.0075273914, + 0.021530675, + 0.015666766, + -0.00064223446, + -0.01865992, + 0.013096437, + 0.027505856, + -0.05812726, + 0.042838812, + -0.013051929, + 0.022109278, + 0.0043200436, + 0.0035578469, + -0.004717833, + -0.027327824, + -0.01198374, + 0.015666766, + -0.015533242, + 0.033803716, + 9.8925666e-05, + -0.026370905, + -0.04090272, + -0.016734954, + 0.0051184036, + 0.012974041, + -0.018637665, + 0.040079325, + -0.052474763, + -0.028574044, + 0.05109502, + -0.00897946, + -0.004648289, + 0.040546656, + 0.03823225, + 0.031177754, + 0.021964626, + 0.012751501, + 0.008028105, + -0.015855923, + 0.00059146766, + -0.025347224, + 0.050783463, + -0.018938093, + -0.013908705, + 0.019071616, + -0.056569487, + 0.013196579, + 0.0064425124, + 0.030287595, + -0.025747795, + 0.00039744124, + 
-0.032958068, + 0.0027094157, + -0.045932107, + 0.059507005, + -0.0645809, + -0.007026678, + 0.0031990022, + -0.008812556, + -0.037742663, + -0.03745336, + 0.04855807, + -0.01660143, + -0.02599259, + -0.04882512, + -0.024590591, + 0.041147515, + 0.024568336, + 0.0021113413, + 0.018715553, + -0.017970048, + 0.01824822, + 0.04533125, + -0.017257921, + -0.008634524, + -0.02169758, + -0.0016273186, + 0.0062978617, + 0.052830826, + 0.00030338363, + 0.024902146, + 0.005541228, + 0.025681034, + -0.034248795, + 0.031311277, + -0.0039361636, + -0.030220835, + 0.014943513, + -0.0041002864, + -0.008957206, + -0.018926965, + -0.03700828, + -0.022632245, + -0.039055645, + 0.01920514, + -0.029864771, + 0.015377465, + 0.0074161217, + 0.0016328819, + -0.029820263, + -0.016746081, + 0.008985024, + 0.038388025, + 0.04753439, + 0.0014798862, + 0.031756356, + -0.03516121, + 0.029820263, + -0.032713275, + -0.03484965, + -0.0040029255, + 0.014887878, + -0.01620086, + 0.02209815, + 0.013330103, + 0.04468589, + 0.013241087, + 0.02763938, + -0.03262426, + 0.07054495, + 0.023655925, + 0.0039806715, + -0.027327824, + 0.01418688, + 0.0022490376, + -0.07695408, + 0.019383172, + -0.004595436, + -0.034538098, + -0.065871626, + -0.026726969, + -0.0013651394, + 0.015766907, + 0.036229394, + 0.0050738957, + 0.01689073, + -0.032490734, + -0.019861631, + 0.033625685, + 0.00045307606, + -0.033403147, + -0.00094787823, + -0.015010275, + 0.018014556, + 0.023767196, + 0.036073618, + 0.015566623, + -0.051228542, + -0.0020807423, + 0.022120405, + -0.025903573, + 0.0077221133, + -0.06426934, + 0.0055467915, + 0.02741684, + 0.028017696, + -0.012662485, + 0.031467054, + 0.003716406, + 0.0029152646, + 0.028707568, + -0.0065815994, + -0.008634524, + -0.044218555, + 0.019872759, + -0.020640519, + 0.0061810287, + 0.01993952, + -0.03213467, + -0.010820973, + 0.004890301, + 0.026860492, + -0.071034536, + -0.016868478, + -0.012862771, + 0.0066094166, + 0.03573981, + -0.010876607, + -0.03625165, + 0.0018429034, + 
0.010047649, + 0.0038332392, + -0.018737808, + -0.015533242, + -0.043595444, + 0.017002001, + 0.03206791, + -0.05416606, + -0.0645809, + 0.08033668, + -0.007087876, + 0.021864485, + -0.016668193, + -0.027461348, + -0.0005031474, + -0.046599727, + 0.001794223, + -0.009652642, + 0.0019694727, + -0.012072756, + -0.03954523, + -0.026526682, + 0.02158631, + 0.02906363, + -0.050827973, + -0.022921545, + 0.016334383, + 0.05461114, + 0.017947793, + -0.0060864496, + -0.015188306, + -0.020184312, + 0.02022882, + 0.007466193, + -0.0018442943, + 0.05376549, + 0.010609561, + -0.063690744, + -0.007922399, + -0.015644511, + 0.03602911, + 0.039122406, + 0.0029903715, + 0.038321264, + 0.03324737, + -0.07152413, + 0.048157502, + -0.034248795, + -0.013096437, + 0.028685313, + 0.03602911, + 0.010726393, + 0.008005851, + -0.0317341, + 0.03489416, + 0.026771476, + 0.03478289, + -0.0135081345, + -0.019750362, + 0.014164626, + 0.0030654785, + 0.0015758564, + -0.02345564, + 0.005869474, + 0.0041308855, + -0.018192586, + 0.0026009278, + -0.024368051, + 0.04039088, + -0.015121545, + -0.014609704, + -0.020774042, + 0.011916978, + 0.002382561, + 0.024590591, + 0.02254323, + 0.004690015, + -0.0038582748, + -0.0017205068, + -0.0028846655, + -0.08095979, + 0.021630818, + -0.028418267, + -0.011154781, + -0.08403084, + -0.0057415133, + -0.07459517, + 0.0026440448, + -0.007377177, + -0.026704714, + -0.07330444, + 0.0011891943, + 0.0043756785, + -0.042505004, + -0.024345798, + -0.003040443, + -0.012762628, + 0.045108713, + -0.011288305, + 0.047890455, + 0.028418267, + -0.020351218, + 0.024078751, + -0.029775755, + 0.014164626, + 0.027861917, + -0.054922696, + -0.08082627, + 0.0065037105, + -0.02532497, + -0.0054689026, + 0.018481888, + 0.053008858, + -0.017213413, + 0.012873897, + -0.019783743, + -0.020874185, + -0.018493015, + -0.032334957, + -0.0039973618, + -0.017246794, + -0.06831956, + 0.0023783885, + -0.0055885175, + -0.004776249, + 0.007466193, + 0.06769645, + 0.014554069, + -0.03941171, + 
0.004606563, + -0.028106712, + -0.015510988, + -0.01663481, + -0.015533242, + -0.00910742, + 0.023188593, + -0.0034187597, + -0.023477895, + 0.009975323, + -0.0012643012, + 0.00872354, + -0.016334383, + -0.007226963, + 0.014832243, + -0.050516415, + -0.051228542, + 0.025191447, + 0.0084620565, + -0.0012656922, + -0.060263637, + -0.05225222, + 0.025725542, + 0.031222261, + 0.023010561, + -0.06716236, + 0.011916978, + 0.03375921, + 0.03008731, + 0.008818119, + 0.0034716127, + 0.013741801, + -0.05421057, + -0.0051712566, + -0.008801429, + -0.020317836, + -0.05171813, + -0.021608565, + -0.05087248, + 0.0007510701, + -0.023366624, + 0.016223114, + -0.0037831678, + -0.008139375, + 0.035495017, + 0.015922686, + -0.059685037, + 0.0050405147, + 0.015900431, + 0.00504886, + -0.023144085, + 0.0021447223, + -0.0072992886, + 0.012384311, + -0.0040724687, + 0.04508646, + -0.030643659, + -0.024546083, + 0.022131532, + 0.007939089, + 0.010904425, + 0.003952854, + -0.026905, + 0.02158631, + 0.021742089, + 0.0317341, + -0.0052574906, + -0.015166052, + -0.0018707209, + -0.0373866, + 0.0033881606, + -0.0075997165, + 0.019349791, + -0.0027163702, + 0.015332957, + 0.027950933, + 0.0017664055, + -0.053053364, + 0.014887878, + -0.0023060634, + -0.018481888, + -0.010954496, + -0.0036440808, + 0.008940516, + 0.020673899, + 0.016690446, + -0.008934952, + -0.015766907, + 0.06720687, + -0.022309562, + 0.062355507, + -0.0055996445, + 0.008406421, + -0.047667913, + -0.02045136, + -0.023833957, + -0.018737808, + 0.0004965408, + 0.0056441524, + 0.020918693, + 0.015922686, + 0.01891584, + 0.028685313, + 0.004831884, + -0.009864054, + 0.0054522124, + 0.015154925, + 0.0043784603, + 0.0063924408, + 0.01653467, + -0.019227395, + 0.033981748, + -0.0064369487, + 0.021908993, + 0.024056496, + 0.019527823, + 0.030398866, + 0.039745517, + 0.0016314911, + -0.0076776054, + -0.0018915839, + 0.004879174, + -0.008823683, + -0.011266051, + 0.023121832, + -0.015855923, + 0.008200573, + -0.011627678, + 
-0.021541802, + 0.0066038533, + 0.013608277, + -0.010943369, + 0.070856504, + 0.028351504, + -0.011672186, + -0.038388025, + 0.022198293, + 0.00599187, + 0.034827396, + 0.017836524, + 0.05047191, + 0.004709488, + 0.029953787, + -0.03756463, + 0.011154781, + -0.010075466, + -0.03874409, + 0.005824966, + -0.031088738, + 0.026482174, + -0.018203713, + 0.0025703288, + -0.030843945, + -0.008884881, + 0.0064202584, + -0.009023968, + -0.01235093, + -0.013296722, + -0.007338233, + -0.038098726, + -0.012484454, + -0.008779175, + -0.013330103, + -0.00038735743, + -0.03456035, + 0.04339516, + -0.033692446, + -0.034649365, + 0.010409275, + -0.0006161556, + -0.007215836, + 0.0144873075, + 0.018626537, + 0.038833104, + 0.020840803, + 0.039589737, + -0.013719547, + -0.044396587, + 0.0013018548, + -0.04159259, + 0.048157502, + 0.030510135, + 0.058260784, + 0.008050359, + 0.0032880178, + 0.058750372, + -0.014398292, + 0.017881032, + -0.0077833114, + -0.019772615, + 0.009908562, + -0.0038304573, + -0.0004770686, + 0.004587091, + 0.014453926, + 0.018637665, + 0.046866775, + -0.023811704, + 0.0013498398, + 0.016746081, + 0.0033492162, + -0.016901858, + 0.016846223, + -0.03705279, + 0.0024785313, + 0.026504429, + 0.029820263, + -0.0030265343, + 0.023767196, + 0.036563203, + -0.023411132, + 0.025814557, + -0.015889306, + -0.002148895, + 0.0007246435, + 0.012484454, + -0.016390018, + 0.02129701, + -0.021397153, + 0.03903339, + 0.06876464, + 0.0033325257, + 0.0020626609, + 0.018782316, + 0.035450507, + 0.010047649, + 0.04226021, + -0.02019544, + -0.0022490376, + 0.026037097, + 0.008183883, + 0.017291302, + -0.056124408, + 0.008834809, + 0.0015313484, + -0.04159259, + 0.034449082, + 0.0032435101, + 0.039055645, + -0.0052686175, + -0.0018081317, + -0.0018289947, + -0.0042699724, + 0.016245367, + 0.0026273543, + -0.041214276, + 0.00060607184, + 0.0037108425, + 0.0186933, + -0.03698603, + 0.013174325, + 0.024657352, + 0.048469055, + -0.010409275, + 0.0027247153, + -0.014298148, + 0.02815122, 
+ -0.037653647, + 0.029508708, + -0.018748935, + -0.005930672, + -0.013908705, + 0.001450678, + -0.024746368, + 0.013797436, + 0.004965408, + -0.01931641, + -0.0076497877, + 0.0054967203, + -0.009157492, + 0.004445222, + 0.032980323, + -0.0018526395, + 0.02209815, + -0.016935239, + -0.0041948655, + -0.023722688, + -0.01469872, + -0.01850414, + 0.04350643, + 0.012885025, + 0.006715123, + -0.008473183, + -0.0013727891, + 0.019917266, + 0.0006780494, + -0.00078584184, + -0.014921259, + -0.021864485, + -0.011894724, + 0.0009228426, + -0.02396748, + -0.018737808, + -0.02345564, + 0.030154074, + -0.034916412, + 0.018871332, + -0.043773476, + -0.057637673, + 0.020473614, + -0.047000296, + 0.0059139812, + 0.022009134, + -0.031110993, + -0.028774329, + -0.024167767, + 0.033158354, + 0.0132744685, + -0.02059601, + -0.0031461492, + 0.02056263, + 0.0045592734, + 0.009096293, + 0.004996007, + 0.032312702, + -0.03516121, + 0.0063089887, + 0.013997721, + -0.01784765, + -0.0050961496, + -0.008005851, + 0.030198582, + 0.029998295, + -0.011210416, + -0.024501575, + 0.0053159073, + -0.014342656, + -0.053453937, + 0.026593445, + -0.0016064554, + 0.04317262, + -0.009886308, + 0.024523828, + -0.010292442, + 0.019739235, + 0.00011300823, + -0.0036496443, + -0.027350077, + 0.005824966, + -0.011738948, + -0.0042950083, + -0.004973753, + 0.005187947, + -0.02792868, + -0.010275751, + 0.029041376, + -0.00084773556, + 0.015099291, + 0.04457462, + 0.007076749, + -0.0031350222, + 0.011099147, + 0.04159259, + 0.012506708, + 0.027995441, + 0.00896277, + -0.0075496454, + 0.01920514, + 0.014453926, + -0.042638525, + -0.021029962, + 0.033380892, + 0.0020668337, + 0.003502212, + -0.007922399, + 0.0066261073, + 0.0048597017, + 0.0012197935, + 0.0029820264, + -0.009079603, + -0.0030460064, + -0.027973188, + 0.00013734847, + 0.03910015, + 0.0051962924, + 0.015332957, + -0.020484742, + -0.012373184, + 0.014821116, + -0.029731248, + 0.015277321, + 0.010164482, + -0.016946366, + 0.032958068, + 
0.00080531405, + -0.009674896, + 0.013152071, + 0.04628817, + 0.004050215, + 0.007087876, + 0.005797148, + 0.014809989, + -0.011227107, + -0.00030338363, + -0.012250788, + -0.023188593, + -0.019160632, + 0.014287022, + 0.006325679, + 0.0013797436, + 0.048335534, + -0.0014882315, + 0.02679373, + -0.022365198, + 0.020940946, + 0.018370617, + 0.019984027, + 0.020150932, + 0.008617834, + 0.0065927263, + -0.0373866, + 0.0039695445, + -0.004756777, + 0.0002465318, + -0.03496092, + 0.03262426, + -0.0011224325, + 0.00044264455, + 0.012829389, + 0.032223687, + -0.0025911918, + -0.013686166, + -0.007916835, + 0.028084457, + 0.032980323, + -0.008812556, + 0.008806992, + 0.009391158, + 0.021174613, + -0.05643596, + 0.014921259, + 0.012517835, + -0.0013560988, + 0.016134098, + -0.018737808, + 0.021675326, + -0.021285882, + 0.010509417, + -0.003911128, + 0.0016301002, + -0.0031600578, + 0.013007421, + 0.027238809, + -0.019728107, + -0.02815122, + 0.019616839, + -0.011154781, + 0.0271943, + -0.01195036, + 0.014720974, + -0.028596297, + 0.0043005715, + -0.020863058, + -0.032713275, + -5.333119e-05, + 0.02554751, + -0.012061629, + -0.008222827, + 0.012662485, + 0.012484454, + 0.009702712, + -0.024011988, + -0.024768623, + 0.0015077037, + 0.0061587747, + 0.02781741, + -0.019984027, + -0.010770901, + 0.00317953, + -0.016022827, + 0.009246508, + 0.039011136, + -0.038944375, + -0.036919266, + -0.018826824, + 0.0012184025, + -0.031978894, + 0.012818263, + -0.0038332392, + -0.011961486, + 0.0030237525, + 0.006002997, + 0.029931534, + 0.018826824, + 0.046377186, + 0.0042254645, + 0.0135303885, + -0.014453926, + 0.009074039, + -0.0033186171, + -0.009352214, + 0.030621406, + -0.017613985, + -0.0022309562, + 0.021352645, + -0.019561203, + 0.022242801, + -0.0023658706, + -0.0040335245, + -0.03240172, + 0.028907852, + 0.030866198, + 0.015021401, + -0.010214553, + -0.021486167, + 0.008222827, + -0.014520688, + -0.0009694368, + 0.04372897, + -0.005713696, + -0.0049209, + 0.013441373, + 
0.016935239, + -0.012951787, + 0.010342513, + 0.013953213, + -0.011438519, + 0.08006963, + -0.017391445, + 0.015700147, + -0.0073437965, + -0.001139123, + 0.03785393, + 0.008584453, + -0.045041952, + 0.032268196, + -0.0036162634, + 0.026148366, + 0.024457067, + 0.045531537, + -0.0017385881, + 0.021397153, + 0.025614271, + -0.03631841, + -0.011538662, + -0.008606707, + 0.018203713, + 0.0076609147, + 0.031222261, + -0.008795865, + -0.003351998, + -0.020295583, + -0.0037442234, + 0.029664487, + -0.045976616, + -0.0020974327, + -0.03160058, + 0.044263065, + -0.015933814, + -0.0020904783, + -0.012150645, + -0.009352214, + -0.0073326696, + -0.021675326, + 0.02345564, + 0.028329251, + 0.006164338, + 0.045754075, + -0.03522797, + -0.008690159, + 0.0399458, + 0.025814557, + 0.00190132, + -0.00080948666, + -0.0651595, + -0.032935813, + 0.027505856, + 0.018559776, + -0.0031183318, + 0.030398866, + 0.012317549, + 0.025124686, + 0.03331413, + -0.007293725, + 0.004500857, + -0.034627113, + 0.040346373, + 0.016512414, + 0.035094444, + 0.0053298157, + 0.023589164, + -0.0041475757, + 0.01436491, + -0.008222827, + 0.00074272486, + 0.016367765, + -0.035784315, + -0.021797722, + -0.011894724, + -0.001177372, + -0.005824966, + 0.023121832, + 0.003029316, + 0.013430246, + -0.046199154, + 0.015922686, + -0.0037831678, + 0.025169194, + 0.03522797, + -0.031467054, + 0.025235955, + -0.007421685, + -0.006002997, + -0.009852927, + -0.04611014, + 0.023922972, + -0.0189826, + -0.030732675, + 0.024212275, + -0.016134098, + 0.020417979, + 0.005836093, + 0.014965767, + -0.014976894, + -0.013864198, + -0.021397153, + -0.02605935, + -0.02939744, + -0.053409427, + 0.012562343, + 0.01469872, + 0.030554643, + 0.024612844, + -0.028796583, + 0.015600003, + -0.014832243, + -0.0026565627, + -0.0076497877, + 0.0052018557, + 0.066717274, + -0.012684739, + 0.02390072, + -0.019639092, + 0.008289589, + -0.00051044946, + 0.013140945, + 0.028574044, + -0.030732675, + 0.007883454, + 0.014676466, + -0.014142372, + 
0.03030985, + -0.0034883032, + 0.0054383036, + -0.029130392, + 0.030287595, + 0.01627875, + -0.0036162634, + -0.004740087, + 0.02283253, + 0.025280463, + 0.025903573, + 0.0055384464, + -0.041614845, + -0.005638589, + 0.0043005715, + 0.02169758, + -0.011661058, + -0.03082169, + -0.024991162, + -0.015677892, + 0.004584309, + 0.009524682, + 0.0291749, + 0.014209134, + 0.024612844, + 0.016434526, + 0.02928617, + 0.021085598, + 0.0076720417, + -0.024078751, + -0.027327824, + 0.0094913, + 0.016211987, + -0.004564837, + 0.01198374, + -0.00938003, + 0.04159259, + 0.008779175, + 0.017402573, + 0.013441373, + -0.0030793874, + -0.025703287, + -0.04366221, + -0.03306934, + -0.0075663356, + 0.010164482, + 0.00069126266, + 0.010971187, + 0.01123267, + -0.00010631466, + 0.0035300294, + 0.010542799, + 0.004612127, + 0.0020751788, + -0.020651646, + 0.016423399, + 0.033514418, + 0.010893298, + 0.030487882, + -0.015622257, + 0.029775755, + 0.008567763, + 0.004948717, + 0.01484337, + 0.028195728, + 0.0052713994, + -0.017224541, + -0.025213702, + -0.005869474, + -0.06391328, + -0.015600003, + -0.01821484, + 0.010320259, + -0.0110602025, + 0.014965767, + -0.035517268, + 0.010047649, + -0.012428819, + 0.032846797, + 0.026037097, + -0.023099577, + -0.0019430461, + -0.010487163, + -0.010019831, + 0.00899615, + -0.0052853078, + 0.007232527, + 0.014609704, + -0.022020262, + -0.018548649, + 0.027038522, + -0.0027483602, + -0.01748046, + -0.023477895, + 0.014064482, + 0.024011988, + -0.019138379, + -0.013930959, + -0.041548084, + -0.01627875, + 0.02206477, + 0.010476037, + -0.044841666, + -0.021374898, + 0.0017038163, + -0.014053356, + 0.012417692, + 0.020584883, + -0.021085598, + 0.014921259, + 0.005747077, + -0.017881032, + -0.03456035, + -0.053097874, + -0.008300715, + -0.011054639, + -0.03162283, + 0.0005358329, + 0.00092006085, + 0.016078463, + -0.0054216133, + -0.018804569, + -0.022921545, + 0.010036522, + -0.054700155, + 0.0034410136, + -0.02837376, + 0.01679059, + -0.01733581, + 
-0.0043868055, + -0.011405138, + -0.004434095, + 0.0074606296, + 0.025413986, + -0.012250788, + -0.013675039, + 0.022276182, + 0.0022267837, + 0.014264768, + 0.011438519, + 0.011082456, + -0.022131532, + 0.019372044, + 0.030799437, + 0.014809989, + -0.01627875, + 0.006153211, + -0.026215127, + -0.018459633, + -0.013919832, + 0.022409705, + 0.017881032, + 0.00246045, + 0.030131819, + -0.004770686, + -0.021029962, + -0.0050989315, + -0.013775182, + 0.00013326279, + 0.044441096, + -0.007455066, + -0.013719547, + 0.016223114, + -0.0066761784, + 0.0021711488, + 0.021942373, + -0.013474753, + -0.028596297, + -0.013730674, + 0.005641371, + -0.0006811788, + -0.0019889448, + 0.033803716, + 0.0028067767, + 0.005897291, + -0.0076275337, + 0.025480747, + 0.03943396, + 0.031266768, + -0.04012383, + -0.031244515, + -0.014720974, + 0.0027316697, + 0.021908993, + -0.029820263, + -0.0040474334, + 0.022587737, + -0.0005236628, + -0.0032880178, + -0.009574752, + -0.041548084, + -0.027172046, + -0.0028387667, + 0.0009951679, + 0.024234528, + -0.008473183, + 0.032780036, + 0.017213413, + 0.022164913, + -0.026838237, + 0.035762064, + -0.025347224, + -0.047000296, + 0.0135303885, + 0.03727533, + 0.0038582748, + -0.07579688, + 0.004381242, + 0.025970334, + 0.0020334527, + 0.016801717, + 0.009391158, + 0.021196866, + -0.014465054, + 0.0149991475, + 0.016134098, + -0.009435666, + -0.01447618, + -0.011961486, + -0.015633384, + 0.0076831686, + -0.0026009278, + 0.0018136952, + 0.026994014, + -0.007143511, + -0.024011988, + -0.0037803862, + 0.015811415, + -0.044552363, + 0.029130392, + 0.018637665, + -0.007176892, + -0.0029263915, + 0.022899292, + 0.010381457, + -0.005980743, + -0.034360066, + 0.022120405, + -0.008528818, + 0.01495464, + 0.033692446, + 0.0067318133, + -0.033047084, + -0.002834594, + -0.0016899076, + 0.0041336673, + 0.019739235, + 0.006831956, + 0.00014725841, + 0.015332957, + -0.03378146, + -0.03326962, + -0.012106137, + 0.045398016, + 0.010342513, + -0.0045592734, + 
-0.061999444, + -0.011388448, + -0.019616839, + 0.013118691, + -0.039901294, + 0.0031350222, + 0.02872982, + 0.038543805, + 0.03211242, + 0.0029403004, + 0.009424538, + 0.0043228255, + -0.0052992166, + -0.020440234, + 0.014988021, + 0.012929533, + -0.024256783, + 0.0019152287, + 0.012072756, + -0.010359203, + -0.014131244, + 0.0069543524, + -0.015032529, + 0.01436491, + -0.0069877333, + -0.02272126, + -0.024390304, + 0.0040585604, + -0.007377177, + 0.013574896, + 0.014932386, + 0.02844052, + 0.010693013, + -0.035895586, + -0.008256207, + 0.0416371, + 0.0020056353, + 0.013430246, + -0.026704714, + 0.02939744, + -0.06631671, + -0.036095873, + 0.002984808, + -0.03716406, + -0.02019544, + -0.01594494, + 0.036229394, + 0.032891307, + -0.0026885527, + 0.019627964, + -0.026415413, + 0.0009965587, + 0.030131819, + -0.014554069, + -0.0012851644, + -0.0044424403, + -0.0020028534, + -0.012962913, + -0.015711274, + 0.010492727, + -0.032090165, + 0.008373041, + 0.0027024613, + 0.052430253, + -0.00822839, + 0.013441373, + -0.006659488, + 0.027728396, + 0.0075107007, + -0.013630531, + 0.013396865, + -0.0050099157, + 0.0094913, + 0.0040724687, + -0.002278246, + 0.0043562064, + -0.028907852, + -0.01858203, + 0.028351504, + -0.01198374, + -0.021185739, + -0.0026106639, + 0.013063055, + -0.04944823, + 0.008912698, + 0.01108802, + 0.051362067, + 0.033336386, + 0.0064925835, + -0.01931641, + 0.0051712566, + 0.029775755, + -0.018871332, + 0.013074183, + -0.0043422976, + 0.026838237, + -0.01597832, + -0.0023018906, + 0.017324682, + -0.008284025, + -0.0021516767, + 0.003727533, + -0.01637889, + 0.03863282, + 0.001164854, + 0.031756356, + -0.0073215426, + 0.017714126, + 0.021753214, + 0.008367477, + 0.038766343, + -0.0006571863, + -0.028306996, + 0.031511564, + -0.012651358, + -0.0052380185, + 0.025970334, + 0.039567485, + 0.00258841, + 0.043973763, + 0.0063089887, + 0.011460773, + 0.030465627, + 0.00896277, + -0.044485603, + 0.015778035, + -0.006314552, + -0.0013060274, + -0.010642941, + 
0.038165487, + 0.00021123845, + 0.032713275, + -0.009519118, + -0.037141807, + -0.039055645, + -0.008729103, + 0.009274324, + 0.019839376, + 0.0057192594, + -0.009213126, + 0.052830826, + 0.008478747, + 0.019182887, + -0.035205714, + -0.003491085, + 0.032935813, + 0.00061824196, + -0.017569477, + 0.0135081345, + 0.0053325975, + 0.001492404, + 0.0154887345, + 0.031289022, + 0.01594494, + 0.030220835, + 0.0050349515, + -0.011182599, + -0.031266768, + 0.01993952, + -0.012417692, + 0.0543886, + -0.039011136, + 0.0015132672, + 0.0029820264, + 0.01719116, + 0.018292729, + -0.0013463626, + -0.026370905, + 0.0325575, + -0.022632245, + 0.002912483, + -0.0232331, + 0.008406421, + -0.046822265, + -0.0018289947, + -0.040413134, + -0.018938093, + 0.01663481, + 0.02254323, + 0.007338233, + 0.011049075, + -0.028841091, + 0.021274755, + 0.023922972, + 0.009797292, + -0.030688167, + -0.017358065, + 0.0022434741, + -0.012684739 + ], + "Category": "Suite", + "Tags": [ + "air conditioning", + "laundry service", + "24-hour front desk service" + ] + }, + { + "@search.action": "mergeOrUpload", + "HotelId": "13", + "HotelName": "Luxury Lion Resort", + "Description": "Unmatched Luxury. Visit our downtown hotel to indulge in luxury accommodations. 
Moments from the stadium and transportation hubs, we feature the best in convenience and comfort.", + "DescriptionVector": [ + -0.0043867915, + -0.055055395, + 0.038483072, + 0.0319377, + -0.03757786, + -0.023976486, + -0.0436126, + 0.01048536, + 0.01361298, + -0.0047900747, + -0.009980531, + 0.0060753585, + -0.01002115, + -0.037740335, + 0.03226265, + 0.027086698, + -0.01874251, + 0.007897385, + 0.024858486, + 0.017071351, + 0.034235545, + -0.008680741, + 0.0021788892, + -0.0310557, + -0.008315175, + 0.030359384, + -0.028084751, + 0.00700378, + 0.020181563, + 0.018870167, + 0.0082977675, + -0.017257035, + 0.026483223, + -0.023593511, + 0.013438901, + 0.03834381, + 0.055055395, + 0.03713686, + 0.005698187, + -0.021910748, + -0.023233749, + 0.0804013, + -0.006237832, + 0.0064583323, + -0.004160489, + 0.0065975953, + -0.018359536, + 0.032123383, + 0.005999924, + 0.04312518, + -0.016491087, + -0.02583333, + -0.018475588, + -0.05788708, + -0.0023152512, + -0.0073751486, + 0.020494904, + -0.033075016, + 0.059511818, + -0.03453728, + -0.054034133, + -0.010160413, + -0.013705823, + 0.08959267, + -0.007050201, + 0.012475664, + -0.0026111854, + 0.031775225, + -0.02420859, + -0.025601223, + 0.041152284, + 0.009411873, + -0.0337017, + 0.0026155375, + 0.03411949, + 0.0010060318, + -0.01768643, + 0.022688301, + -0.0069283457, + -0.012150717, + -0.008622715, + -0.018614851, + -0.043728653, + 0.04695492, + -0.038483072, + -0.019357588, + -0.025972592, + -0.0003410861, + 0.0018539417, + 0.07227761, + 0.01878893, + 0.030823594, + 0.0070676086, + -0.025392327, + -0.016514298, + 0.047767285, + 0.02629754, + -0.04389113, + -2.9058505e-05, + 0.06847109, + 0.055426765, + -0.08207246, + 0.010920558, + 0.006208819, + -0.009655584, + -0.075434245, + 0.010642031, + -0.010113992, + 0.022699906, + 0.020030694, + -0.06693919, + 0.010392519, + -0.01403077, + 0.0088025965, + 0.013972743, + 0.013636191, + -0.00680649, + -0.04533018, + -0.022073222, + -0.03532644, + 0.020111931, + 0.010642031, + 
-0.010729071, + -0.042359233, + -0.02803833, + -0.058165606, + -0.009707808, + 6.0247665e-05, + -0.05059897, + -0.018556826, + -0.057283606, + -0.014170033, + -0.019984273, + -0.0054660817, + -0.025299486, + 0.027458066, + -0.027759803, + -0.017396297, + 0.008622715, + -0.0019743463, + -0.0306147, + -0.059372555, + 0.030800384, + -0.024719223, + -0.04238244, + -0.0020584846, + 0.02413896, + 0.032239437, + 0.002358771, + 0.03456049, + -0.027504487, + 0.010084978, + -0.0017901127, + -0.030916436, + -0.007160451, + -0.029686278, + -0.023001643, + -0.0038790612, + -0.021516168, + 0.024324644, + 0.0012243559, + 0.00021161482, + -0.00874457, + -0.019995878, + 0.058212027, + -0.004482535, + -0.032193016, + 0.011100439, + -0.071488455, + -0.0200423, + 0.036672648, + 0.0018771522, + -0.005497996, + 0.047117393, + 0.03794923, + 0.021249248, + -0.040386334, + 0.019972667, + 0.025090592, + 0.0054138578, + 0.017082956, + -0.05524108, + 0.079472885, + -0.013833481, + 0.006139187, + 0.04748876, + -0.035999544, + 0.02717954, + 0.013230006, + 0.056169502, + -0.010891545, + 0.005825845, + -0.038924072, + 0.027690172, + -0.01922993, + 0.012487269, + -0.027690172, + 0.036951177, + 0.004189502, + 6.527964e-05, + -0.01173873, + 0.018231878, + 0.0381117, + -0.02717954, + -0.014773508, + -0.019287957, + -0.029570226, + 0.019937852, + -0.024835275, + 0.007305517, + 0.011918611, + 0.011611071, + 0.024928117, + 0.02585654, + 0.023976486, + -0.043334074, + -0.036626227, + -0.026065433, + 0.042869862, + 0.03497828, + -0.016839245, + 0.0121159, + -0.0091971755, + 0.056215923, + -0.003539607, + 0.05222371, + -0.040316705, + -0.01043894, + -0.0018539417, + -0.010607216, + -0.022978432, + -0.037809964, + -0.033399966, + -0.03407307, + -0.023535484, + 0.043821495, + 0.015168087, + -0.027806224, + 0.03753144, + 0.022282116, + -0.013206796, + -0.003475778, + 0.003969002, + 0.03107891, + 0.02413896, + -0.03456049, + -0.00079568627, + 0.017280245, + 0.0535235, + 0.047952972, + -0.02244459, + 
0.034258753, + -0.012812217, + -0.017373087, + 0.031287804, + 0.0101546105, + -0.018730905, + 0.027295593, + 0.03748502, + -0.029593436, + 0.0332607, + 0.03632449, + -0.017779272, + -0.01444856, + 0.00089795765, + -0.0058374503, + -0.002830235, + -0.0323787, + 0.0035134952, + 0.0003051823, + -0.07594488, + -0.032680437, + -0.053430658, + 0.03242512, + -0.040711284, + -0.060579505, + -0.039945334, + 0.03154312, + 0.043960758, + 0.012208743, + -0.040618442, + -0.047303077, + 0.043914337, + -0.022827564, + -0.027783014, + -0.020564536, + 0.046699602, + 0.021899143, + -0.058026344, + -0.025694065, + -0.015086849, + -0.033933807, + -0.018405957, + -0.024858486, + 0.0013774004, + -0.012858638, + -0.0019685437, + -0.045074865, + 0.030127278, + -0.009968926, + 0.04282344, + 0.06749625, + -0.02541554, + -0.02543875, + 0.013740638, + 0.02801512, + 0.033840965, + 0.012127506, + 0.045933653, + 0.0049438444, + -0.0034438635, + 0.02592617, + -0.0038906664, + -0.0043461733, + -0.0077175037, + 0.0058490555, + -0.010688453, + -0.003522199, + -0.025299486, + -0.017129377, + 0.055798132, + 0.02134209, + -0.028316855, + -0.009290018, + 0.029663067, + -0.018904982, + -0.0422896, + -0.020808248, + -0.022932012, + -0.030173698, + 0.018951405, + -0.009696202, + 0.02266509, + 0.020413669, + -0.03284291, + -0.015655508, + -0.027388435, + -0.011048216, + 0.013984349, + 0.026042223, + -0.024556749, + -0.028734645, + -0.008448636, + -0.02163222, + 0.0030811988, + -0.04790655, + 0.009231991, + 0.032564383, + 0.018011378, + 0.037763543, + -0.03279649, + -0.014309296, + 0.0352336, + 0.04748876, + -0.009997939, + -0.01381027, + 0.056215923, + 0.033492807, + -0.019125484, + -0.017767666, + -0.01361298, + 0.0032581792, + -0.023976486, + -0.0117851505, + 0.055937394, + 0.012220348, + -0.023222143, + -0.0006615003, + -0.009887689, + -0.009707808, + 0.008088873, + 0.0038268375, + 0.04827792, + 0.060347397, + -0.023314985, + 0.04345013, + 0.026947435, + 0.040502388, + -0.028827488, + -0.044192865, + 
0.02462638, + 0.0037368967, + 0.018429168, + -0.026993856, + 0.0662893, + 0.01925314, + -0.012220348, + -0.026576066, + -0.0091971755, + 0.011239703, + -0.015121666, + -0.023790801, + 0.019717352, + -0.044703495, + -0.042405654, + 0.020402063, + 0.052966446, + 0.013311244, + 0.009539531, + -0.027574118, + 0.023071274, + -0.04403039, + -0.0009124643, + -0.036069177, + -0.009226189, + -0.029918384, + 0.011466006, + -0.052084446, + -0.020030694, + 0.034189124, + -0.007235885, + -0.0046856273, + -0.0049525485, + -0.025578013, + -0.00026891584, + 0.026831381, + 0.008761978, + -0.035512123, + 0.034189124, + 0.035651386, + 0.024278222, + 0.050970342, + 0.014634244, + 0.046978127, + -0.025531592, + 0.02369796, + 0.058490556, + -0.045910444, + -0.03460691, + 0.008773583, + -0.050227605, + -0.0207038, + 0.036022756, + 0.032216225, + -0.056308765, + 0.07009582, + -0.033748124, + 0.0053094104, + -0.010206834, + -0.012533691, + -0.04574797, + -0.040618442, + -0.032982174, + 0.010473755, + -0.024069328, + 0.03198412, + 0.0094234785, + 0.02889712, + 0.02286238, + -0.00025712923, + -0.0068238983, + -0.019392405, + -0.0007920596, + -0.026529646, + -0.016258981, + 0.049949076, + 0.05658729, + -0.021376906, + 0.029639857, + -0.017814089, + 0.014413744, + -0.027899066, + 0.050088342, + 0.0072997143, + 0.00453766, + -0.011146861, + 0.0002939397, + 0.013183585, + 0.030684331, + -0.030730752, + 0.0074505825, + -0.0134737175, + 0.05867624, + 0.05227013, + 0.0076768855, + -0.033957016, + -0.0104911635, + 0.016572325, + 0.0017233824, + -0.024742434, + -0.032169804, + -0.009667189, + -0.035117544, + -0.017303456, + -0.04827792, + -0.015365376, + -0.011396374, + -0.015168087, + -0.08272236, + -0.025531592, + -0.01024165, + -0.0025865242, + -0.047767285, + -0.010015347, + -0.01154144, + 0.001669708, + -0.013496928, + 0.008988281, + 0.0070676086, + -0.0038732586, + -0.022804353, + -0.059465397, + 0.032935753, + 0.017013324, + 0.027991908, + 0.03488544, + -0.01768643, + -0.038042072, + 
0.045074865, + 0.010752281, + -0.0082977675, + 0.018638061, + -0.019171905, + 0.03504791, + 0.007264898, + 0.03834381, + -6.364765e-05, + 0.003577324, + 0.0050802063, + -0.010839321, + 0.0134156905, + -0.004444818, + 0.0030115673, + 0.006910938, + 0.017988168, + 0.030034436, + -0.0049815616, + -0.034769386, + 0.0053297197, + 0.017303456, + -0.0026372974, + -0.014320902, + -0.027968697, + 0.017396297, + 0.0102648605, + 0.02629754, + 0.015121666, + 0.0038268375, + 0.05380203, + -0.019148694, + 0.042498495, + -0.0018220273, + -0.0075318194, + 0.006110174, + 0.0104911635, + -0.04043276, + -0.030707542, + 0.00019529491, + 0.01883535, + 0.033051807, + -0.009812254, + 0.018754115, + 0.011466006, + -0.0055734306, + -0.01358977, + 0.00711403, + 0.010711663, + 0.01037511, + 0.02046009, + 0.012545296, + -0.0032088568, + -0.0033626268, + -0.013543349, + 0.0075782407, + 0.026854591, + 0.043032337, + 0.02806154, + 0.045423023, + 0.00076014514, + -0.015817981, + -0.002656156, + 0.042475283, + -0.0026909718, + 0.0003448941, + -0.017419508, + 0.005431266, + 0.026901014, + -0.01856843, + -0.019891432, + 0.0242318, + 0.021284062, + -0.022386564, + 0.03713686, + 0.026436802, + -0.034258753, + -0.0077407146, + 0.01883535, + -0.0025401032, + 0.011756137, + 0.04786013, + 0.00962657, + 0.014622639, + 0.011686506, + -0.008036649, + 0.0061507924, + -4.490242e-05, + -0.05268792, + -0.013183585, + -0.01444856, + 0.0111642685, + 0.024835275, + -0.02590296, + 0.014529796, + -0.020866273, + -0.016317008, + 0.008355794, + 0.0022166064, + 0.005724299, + 0.011378966, + -0.0034670741, + 0.026227908, + -0.0118954005, + 0.019090667, + 0.0022427181, + -0.030405805, + 0.039388284, + -0.037345756, + -0.011831571, + 0.019612905, + -0.011645887, + 0.034676544, + 0.029593436, + 0.08601825, + 0.019937852, + 0.004700134, + 0.010798703, + 0.013125559, + -0.04841718, + 0.0015391487, + -0.01856843, + 0.00036121398, + 0.04349655, + 0.039945334, + -0.007792938, + -0.005706891, + 0.043798286, + -8.5407526e-05, + 
0.0077407146, + -0.030870015, + -0.01599206, + -0.031566333, + 0.0113905715, + 0.040896967, + 0.014390534, + 0.02592617, + 0.026111854, + 0.054173395, + -0.0034409622, + -0.0066440166, + 0.00024987594, + -0.022398168, + -0.003794923, + 0.016758008, + 0.023883643, + 0.00131067, + 0.015005613, + 0.000107167405, + 0.0027011263, + 0.03363207, + 0.063271925, + 0.018243482, + 0.049113497, + -0.025044171, + -0.0056952857, + -0.013671007, + 0.045005232, + 0.016224166, + 0.024278222, + -0.019218326, + 0.017535562, + 0.07264898, + 0.015342166, + 0.014970797, + -0.000658599, + 0.04658355, + 0.0008246994, + -0.00026238788, + -0.004270739, + 0.0011946174, + -0.047952972, + 0.00700378, + -0.00605795, + -0.06090445, + 0.01900943, + 0.01903264, + -0.012638138, + 0.03270365, + 0.044239286, + 0.017941745, + -0.021620616, + -0.00068144687, + -0.010113992, + 0.02281596, + 0.0034902846, + 0.0015420502, + -0.016096508, + 0.0078451615, + 0.036045965, + 0.005434167, + -0.031357437, + 0.014576218, + 0.0076884907, + 0.0059418976, + -0.036463756, + -0.0036469558, + -0.015817981, + 0.025369117, + -0.04054881, + 0.008408017, + 0.012545296, + 0.017013324, + -0.016932087, + -0.0071372404, + -0.01856843, + 0.022630274, + 0.011721321, + -0.011349953, + 0.024719223, + -0.0041372785, + -0.04024707, + -0.003916778, + -0.009609163, + 0.00350189, + 0.027017066, + -0.0006611377, + -0.0033539226, + 0.016583929, + 0.004308456, + -0.007949609, + 0.047419127, + 0.022769537, + 0.005268792, + -0.034281965, + 0.021620616, + 0.022374958, + -0.013044322, + 0.018382747, + 0.024835275, + -0.044657074, + 0.0013839283, + 0.0040821536, + 0.011146861, + -0.013554954, + -0.029430961, + 0.036719073, + -0.024788855, + 0.0088084, + -0.012754191, + -0.060486663, + 0.014529796, + -0.050274026, + 0.0076652803, + 0.00010517275, + -0.05166666, + -0.0011554495, + -0.07975141, + 0.03277328, + 0.027666962, + 0.020866273, + 0.051109605, + -0.031728804, + 0.009127544, + -0.002255774, + 0.03407307, + 0.015075244, + -0.029036382, + 
0.02354709, + 0.017106166, + -0.0440536, + -0.020808248, + 0.00808307, + -0.010676848, + 0.015400192, + -0.020320825, + 0.03247154, + -0.019485246, + 0.0026155375, + -0.040664863, + -0.016978508, + -0.030916436, + 0.024579959, + 0.009597558, + 0.024835275, + -0.02847933, + -0.020309221, + -0.021725064, + 0.01920672, + -0.008007635, + 0.032634016, + 0.013554954, + 0.0050918115, + -0.0007768277, + 0.023373011, + -0.007125635, + -0.003063791, + 0.030196909, + 0.009522123, + -0.013659402, + -0.0019815997, + 0.027899066, + 0.041918233, + 0.00464791, + 0.027434856, + 0.00030608897, + -0.012672953, + -0.0050657, + -0.020169957, + -0.019833405, + -0.022920405, + -0.024974538, + 0.03409628, + 0.055287503, + 0.001724833, + -0.021156406, + 0.00055523956, + 0.0013157474, + -0.02006551, + 0.011141058, + -0.058026344, + -0.025020959, + -0.0088084, + -0.03189128, + -0.028966751, + 0.020135142, + 0.0034902846, + 0.0016987212, + -0.032123383, + 0.00832678, + -0.007856767, + -0.074459404, + -0.027458066, + 0.016792824, + 0.010026952, + 0.02850254, + 0.010009544, + -0.0066324114, + 0.016641956, + 0.026042223, + 0.003667265, + 0.0120114535, + 0.014053981, + -0.008077268, + -0.061600767, + 0.017744455, + -0.01903264, + -0.011988243, + 0.01793014, + -0.007908991, + -0.02046009, + -0.0076130563, + 0.0242318, + 0.036719073, + 0.016978508, + -0.062343504, + 0.023373011, + -0.023976486, + 0.0051150224, + -0.0032784885, + -0.020541325, + -0.02182951, + -0.025786908, + 0.021237642, + 0.010560795, + -0.04006139, + -0.031171752, + 0.029918384, + -6.260499e-05, + 0.0176168, + 0.018173851, + -0.008750373, + -0.033840965, + 0.0054196604, + -0.004444818, + -0.010723269, + -0.014390534, + -0.01574835, + 0.031752016, + 0.032076962, + 0.008512464, + -0.020843063, + 0.027040277, + 0.0012388624, + -0.013032717, + 0.018533614, + -0.020796642, + -0.0061682006, + -0.018904982, + 0.043403707, + 0.00044136288, + -0.041221917, + 0.03230907, + 0.016966904, + 0.008953465, + -0.035465702, + -0.025949381, + 
0.008280359, + -0.009725215, + 0.02977912, + -0.027458066, + 0.038924072, + 0.0012889102, + -0.012255164, + -0.029570226, + 0.004984463, + -0.012800612, + 0.020007484, + -0.0014267227, + -0.00096396264, + 0.0142048495, + -0.0068355035, + 0.011210689, + 0.0077058985, + -0.0049786605, + -0.01810422, + -0.0204833, + 0.05356992, + -0.040363126, + -0.014959192, + -0.0047494564, + 0.019125484, + -0.016375035, + -0.004853904, + -0.0400846, + 0.02416217, + 0.016375035, + 0.007653675, + -0.043775074, + 0.014901165, + 0.014808323, + -0.011593664, + -0.030452225, + 0.0052455817, + -0.03319107, + -0.0042852457, + 0.035628177, + 0.040409546, + 0.042591337, + 0.011257111, + -0.011448598, + 0.0063248714, + -0.017303456, + 0.036417335, + 0.013763849, + 0.013264823, + 0.0141468225, + -0.014181638, + -0.0018423364, + 0.017349876, + -0.010845124, + 0.0072474903, + -0.0002625692, + 0.004920634, + -0.018139035, + -0.011251308, + -0.03451407, + -0.017872114, + 0.004351976, + -0.01764001, + -0.016467877, + -0.0118954005, + 0.030475436, + 0.042637758, + 0.018556826, + -0.04187181, + 0.012197138, + 0.018173851, + -0.0120114535, + 0.046026498, + -0.040316705, + -0.019682536, + -0.046746023, + 0.04087376, + 0.030336173, + -0.018638061, + -0.02303646, + 0.045098074, + 0.021214431, + -0.026622487, + 0.03279649, + 0.007508609, + 0.021109983, + 0.047117393, + 0.028641803, + -0.018580036, + 0.023906853, + -0.00872136, + 0.029245278, + 0.04618897, + 0.06638214, + -0.013090744, + 0.029755909, + -0.015922429, + 0.0014201947, + -0.0047784694, + -0.011750335, + -0.010560795, + 0.0057562133, + 0.039922126, + 0.012858638, + 0.0014883757, + -0.015260928, + -0.0038674558, + -0.020274404, + -0.01601527, + -0.011802559, + -0.0072939117, + 0.01317198, + 0.054684028, + 0.016908877, + -0.02182951, + 0.0015899219, + 0.029059593, + -0.014367322, + 0.033980228, + -0.005178851, + 0.0065975953, + 0.01491277, + -0.012498874, + 0.00028650506, + 0.0588155, + 0.019102274, + 0.018359536, + 0.011976638, + 0.020274404, + 
0.002859248, + -0.0213653, + 0.024510328, + 0.030962858, + 0.017872114, + -0.0017698035, + 0.021260852, + 0.0003619393, + 0.021109983, + 0.028781068, + -0.003327811, + 0.00453766, + -0.0030724949, + -0.014460165, + -0.015284139, + 0.026483223, + 0.012371217, + 0.011907006, + 0.0108857425, + 0.025113802, + -0.010073373, + 0.004337469, + 0.00014488453, + 0.018498799, + 0.04015423, + 0.0007010308, + 0.027713383, + -0.0063074636, + 0.004380989, + 0.009638175, + -0.011129453, + 0.006446727, + -0.011007598, + -0.020982327, + 0.039341863, + -0.0207038, + -0.004674022, + 0.0006799962, + -0.01570193, + -0.035604965, + -0.013508533, + 7.747424e-05, + -0.04010781, + -0.031566333, + -0.023233749, + 0.0050918115, + 0.019810194, + 0.04837076, + 0.020529721, + -0.012231953, + -0.012800612, + 0.008065662, + 0.0005131705, + -0.009284215, + 0.016688377, + 0.024696013, + -0.0061856085, + 0.015817981, + -0.022108037, + 0.014158428, + -0.037392177, + 0.013322849, + 0.019125484, + 0.019856615, + 0.0057301014, + -0.010102387, + 0.033817753, + 0.01788372, + -0.02264188, + -0.0071662534, + 0.009725215, + -0.018614851, + 0.016084902, + 0.031357437, + 0.0137522435, + 0.014773508, + 0.015318955, + 0.03363207, + 0.02286238, + -0.0032175607, + 0.0141468225, + 0.0047320486, + -0.009336439, + -0.007763925, + -0.008814202, + -0.0068471087, + 0.032517962, + 0.01143119, + 0.006382898, + 0.04043276, + 0.026692118, + 0.018429168, + 0.0124060325, + 0.022189274, + -0.04187181, + -0.017407903, + 0.030312963, + -0.0381117, + 0.012893454, + -0.0068935296, + -0.004946746, + 0.004517351, + -0.007850965, + 0.0209243, + -0.012464059, + 0.033608858, + 0.011640085, + -0.02119122, + -0.031798437, + -0.009643978, + -0.025810119, + -0.04574797, + 0.01744272, + 0.042962704, + -0.008547281, + -0.0058490555, + 0.0079031885, + -0.021284062, + 0.011083032, + -0.017999772, + 0.03890086, + -0.02462638, + 0.020030694, + 0.05779424, + 0.017106166, + 0.003577324, + 0.016932087, + 0.0008566139, + -0.013195191, + 
-0.00016664442, + -0.010763887, + 0.019171905, + 0.018440772, + -0.0009987785, + -0.006910938, + -0.028200803, + -0.0061043715, + -0.0043693837, + -0.04136118, + 0.0033713307, + -0.009278413, + 0.00852407, + -0.011251308, + 0.036974385, + 0.030939646, + 0.0021817905, + 0.010990189, + -0.01748914, + 0.0007586944, + -0.007508609, + 0.034792595, + 0.02418538, + 0.021667037, + 0.017419508, + -0.0022398168, + -0.011117848, + -0.040943388, + 0.003339416, + 0.0009102883, + -0.014808323, + 0.006429319, + 0.010201031, + -0.0018452378, + 0.015876008, + 0.0034960872, + -0.0064815427, + -0.019160299, + 0.028200803, + 0.025044171, + -0.03790281, + -0.0310557, + 0.013624585, + 0.0070269904, + 0.026483223, + 0.001724833, + -0.017999772, + 0.0006977668, + -0.018278299, + -0.01643306, + 0.03291254, + -0.0153305605, + -0.0014419546, + -0.045074865, + -0.011222295, + -0.025299486, + 0.0007184387, + -0.016026877, + 0.008274557, + -0.001423096, + -0.05686582, + 0.05524108, + -0.053337816, + 0.02156259, + 0.014761902, + -0.013891507, + -0.019496853, + -0.03103249, + -0.025020959, + 0.015539455, + 0.01616614, + 0.044216074, + -0.013833481, + 0.02332659, + 0.005315213, + 0.017964955, + 0.0113905715, + 0.00581424, + -0.04607292, + -0.012556901, + 0.012452453, + 0.023222143, + 0.00021832412, + -0.005515404, + -0.009539531, + -0.018231878, + -0.0043432717, + -0.012104295, + -0.011907006, + 0.022386564, + 0.008141096, + -0.015678719, + 0.0014064135, + -0.031868067, + 0.0022934913, + -0.019392405, + -0.032564383, + 0.042869862, + -0.022792747, + 0.00095235737, + -0.0022209582, + -0.022525826, + -0.030243332, + -0.011930216, + -0.020030694, + -0.021574195, + 0.028386489, + -0.020668983, + 0.00061616726, + -0.0046014893, + -0.017698035, + 0.020854669, + -0.032216225, + 0.0148779545, + 0.025624434, + 0.026529646, + 0.0051730485, + -0.0065685823, + 0.015249323, + -0.020993931, + -0.015736744, + 0.042080704, + -0.019438826, + 0.026460012, + 0.0075666355, + 0.014494981, + -0.039550755, + 
0.0021020044, + -0.03140386, + 0.006179806, + -0.03063791, + -0.0025589617, + 0.038088493, + 0.004645009, + -0.010694255, + 0.0043983967, + -0.0061043715, + 0.0032639818, + 0.06322551, + 0.0035251004, + -0.02347746, + -0.014460165, + 0.03797244, + -0.013404085, + -0.018046193, + 0.011442795, + -0.001568162, + -0.024579959, + 0.023663143, + -0.023535484, + 0.01984501, + -0.03230907, + 0.0071372404, + -0.003815232, + -0.0004167017, + -0.020135142, + -0.009290018, + 0.012197138, + 0.021260852, + 0.0043113576, + -0.02720275, + -0.017547166, + -0.02673854, + 0.024812065, + -0.01768643, + -0.008820005, + -0.01361298, + 0.042800233, + -0.012197138, + 0.02636717, + -0.012823822, + 0.012765796, + 0.023198932, + 0.013682612, + -0.012301585, + -0.008500859, + 0.0003528727, + 0.017732851, + 0.008535676, + 0.017129377, + -0.013624585, + -0.017802482, + -0.0086923465, + 0.0024748235, + -0.0097020045, + 0.0057272003, + 0.008129491, + 0.010235847, + 0.008872228, + -0.0082977675, + -0.004584081, + 0.02636717, + 0.00034144876, + 0.034235545, + -0.032494754, + -0.016270587, + 0.025624434, + -0.032076962, + 0.013601375, + 0.016096508, + 0.021678641, + 0.02847933, + 0.025299486, + -0.0069283457, + -0.003731094, + -0.0032494753, + 0.024371065, + -0.006191411, + 0.036092386, + -0.003971903, + -0.021887537, + 0.04971697, + -0.0127425855, + 0.0004464402, + -0.0021194122, + -0.0076362668, + -0.004499943, + 0.026251119, + -0.029941594, + 0.01154144, + 0.041593283, + 0.019404009, + -0.004993167, + 0.021295669, + -0.023930065, + 0.0064061084, + -0.012498874, + -0.042452075, + -0.0048161866, + 0.0012330598, + 0.009568544, + 0.006504753, + 0.0053877463, + -0.009638175, + -0.015063639, + -0.014866349, + 0.029222067, + -0.017848903, + -0.026622487, + -0.0005124452, + -0.00420691, + -0.011233901, + -0.04407681, + 0.0032436727, + 0.0015768659, + 0.010334492, + -0.0008406567, + 0.0012410384, + -0.010897348, + 0.0002165108, + 0.03630128, + -0.002846192, + -0.028757857, + -0.0015826685, + 0.010891545, 
+ 0.007671083, + 0.005585036, + 0.0107754925, + -0.0008587899, + 0.0068819243, + 0.022154458, + -0.00047726667, + -0.011907006, + 0.020692194, + -0.017732851, + 0.011918611, + -0.005149838, + 0.0027185343, + -0.023361407, + -0.022015195, + -0.004444818, + -0.028804278, + 0.0047320486, + -0.009498913, + -0.043334074, + -0.038993705, + -0.019531667, + -0.017535562, + 0.009620768, + -0.024115749, + 0.03158954, + 0.034374807, + 0.010734874, + -0.023233749, + -0.0008246994, + 0.0041662916, + -0.009115939, + 0.026576066, + -0.01574835, + 0.0051817526, + -0.02205001, + 0.0033162057, + 0.024719223, + 0.03504791, + 0.015690323, + -0.023454249, + -0.0109379655, + 0.0137522435, + 0.005094713, + 0.013380875, + 0.008280359, + 0.058954764, + 0.006284253, + -0.007409964, + -0.012638138, + -0.008077268, + 0.0211448, + 0.029454172, + 0.021759879, + 0.002065738, + 0.016792824, + 0.011216492, + -0.010003742, + 0.03532644, + -0.025137013, + 0.0149359815, + -0.009614965, + 0.025299486, + -0.07817309, + 0.0062958584, + -0.005149838, + 0.0048451996, + 0.008883833, + 0.02980233, + 0.029268488, + 0.04447139, + -0.008228135, + -0.006545372, + -0.011222295, + 0.045098074, + -0.01945043, + 0.0015754153, + 0.02636717, + -0.012301585, + 0.019891432, + -0.0049351407, + 0.022084827, + 0.004906127, + 0.02590296, + 0.008895438, + -0.029245278, + -0.023396222, + 0.044192865, + 0.029245278, + 0.02286238, + -0.021841116, + 0.012336401, + -0.042312812, + 0.013125559, + -0.010769689, + -0.012231953, + -0.03534965, + -0.03110212, + -0.005817141, + 0.04618897, + -0.02160901, + 0.004856805, + 0.014065586, + 0.006777477, + 0.015736744, + -0.008408017, + 0.018487193, + 0.042846654, + 0.004172094, + 0.022212485, + -0.0030115673, + 0.010949572, + -0.0076420694, + 0.0072765034, + -0.043241233, + 0.02889712, + 0.026692118, + -0.009539531, + 0.0152725335, + 0.012266769, + 0.007235885, + 0.030080857, + -0.006261043, + 0.01270777, + 0.019311167, + -0.013346059, + 0.013856691, + -0.0052658906 + ], + "Category": 
"Luxury", + "Tags": [ + "bar", + "concierge", + "restaurant" + ] + } + ] +} \ No newline at end of file diff --git a/sdk/search/azure-search-documents/samples/data/query_vector.json b/sdk/search/azure-search-documents/samples/data/query_vector.json new file mode 100644 index 000000000000..168dca84dd99 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/data/query_vector.json @@ -0,0 +1,1538 @@ +[ + -0.045507785, + 0.028645637, + 0.014222746, + -0.018325701, + -0.020214563, + -0.038177505, + 0.015761355, + 0.047784425, + 0.010457533, + -0.042280458, + 0.046658613, + 0.0010140118, + 0.008381038, + -0.009988446, + 0.0053694933, + 0.05784167, + 0.004900405, + 0.011414473, + 0.037527036, + 0.08145868, + 0.0048034606, + -0.036801513, + -0.059943184, + -0.020614851, + -0.01619917, + 0.032973755, + -0.03532545, + -0.013622314, + 0.009012743, + -0.010657678, + -0.03354917, + -0.041229703, + 0.004687752, + -0.09882119, + 0.057391346, + 0.019413985, + -0.010832804, + -0.010069754, + 0.031922996, + -0.0033805603, + 0.010926622, + 0.0031381983, + 0.048660055, + -0.0047846967, + 0.011595854, + 0.001674644, + 0.03645126, + 0.034374762, + -0.030922277, + -0.012765447, + -0.01850083, + -0.053588606, + -0.00835602, + -0.06674808, + -0.013834967, + 0.008368528, + 0.01100793, + -0.004475099, + 0.07610483, + 0.0130844265, + 0.00012381967, + -0.0016246078, + 0.013859984, + 0.049685795, + 0.023642031, + 0.042455584, + -0.008443583, + 0.024104865, + -0.055990335, + -0.015673792, + 0.009100306, + -0.03972862, + -0.043931648, + 0.0052350215, + 0.06809906, + -0.0184633, + -0.003202307, + 0.0057729087, + 0.009606921, + -0.018625919, + 0.0040091383, + -0.016599458, + -0.0022719493, + -0.004809715, + -0.0045595346, + -0.052087523, + -0.041980244, + -0.000460488, + -0.034574907, + 0.048359837, + 0.03342408, + -0.014598017, + 0.026343979, + -0.058291994, + -0.016737057, + 0.0073052626, + 0.020539798, + -0.010194845, + -0.033499133, + -0.014635543, + 0.037502017, + -0.089964814, + 
0.0035807046, + -0.037176784, + -0.0020061329, + -0.0072990083, + 0.024117375, + 0.02090256, + 0.022853965, + -0.0027723096, + -0.0719018, + 0.02684434, + 0.010957894, + 0.024254974, + -0.039953783, + 0.055740155, + -0.01708731, + -0.016962219, + 0.01481067, + 0.05934275, + 0.019701693, + 0.021102702, + -0.008024531, + -0.035150323, + -0.00784315, + -0.042105332, + -0.04695883, + -0.014410381, + -0.056740876, + -0.04115465, + 0.0025393295, + 0.02847051, + 0.020552306, + 0.01456049, + -0.034224655, + -0.017149854, + -0.015923971, + -0.02254124, + -0.041054577, + -0.00031116165, + -0.00522564, + -0.015798882, + 0.011233092, + -0.027669935, + 0.014535472, + 0.020777468, + 0.019914346, + 0.017762797, + 0.017537635, + 0.040354073, + 0.0073115174, + -0.012790465, + -0.0087375445, + 0.009544376, + -0.06434636, + -0.013222026, + -0.024079848, + -0.019964382, + -0.024530172, + 0.023979776, + -0.055740155, + -0.024830388, + -0.016549421, + 0.03770216, + 0.020152017, + -0.049185432, + -0.020402199, + 0.041304756, + -0.074503675, + -0.050211173, + 0.06669805, + 0.0069675194, + -0.035875846, + 0.06894967, + -0.0031585253, + 0.0018700972, + 0.0086062, + -0.0059386534, + -0.02696943, + -0.007793114, + 0.0016246078, + -0.04463215, + -0.0043687723, + 0.031922996, + 0.03975364, + 0.06309546, + -0.035900865, + 0.015160922, + -0.037276853, + 0.011752216, + -0.04795955, + 0.00068017753, + -0.002251622, + 0.003399324, + 0.044982407, + 0.01107673, + -0.017012255, + 0.01045128, + 0.010207353, + 0.027419753, + -0.060293436, + -0.02139041, + 0.028745709, + -0.0027300918, + -0.03987873, + -0.032323286, + 0.011764726, + -0.015873935, + -0.034850106, + -0.05243778, + -0.0066860667, + -0.0030224898, + 0.032098126, + 0.033924438, + -0.0139725655, + -0.007993259, + -0.00170748, + 0.030471953, + -2.3869736e-05, + 0.06764873, + 0.0009757029, + -0.008087076, + 0.01657444, + 0.0445571, + 0.033198915, + 0.07735573, + -0.01850083, + -0.048359837, + 0.0055070925, + -0.015773864, + -0.0040028836, + 
0.015486157, + -0.017174874, + -0.021365391, + -0.032998774, + 0.043506343, + 0.0076367515, + 0.016737057, + 0.020602342, + -0.0445571, + -0.08651233, + -0.03227325, + -0.033949457, + 0.00597618, + 0.02922105, + 0.0013275188, + -0.00064225955, + -0.03154773, + 0.02315418, + 0.017600179, + -0.025230676, + 0.067048304, + -0.039928764, + 0.00629516, + 0.011445746, + -0.015986517, + 0.0032116887, + 0.028320402, + 0.014385363, + -0.021553027, + 0.05101175, + -0.025355767, + 0.025030533, + 0.0003373524, + 0.023216726, + 0.005638437, + 0.01786287, + -0.01669953, + 0.006267015, + -0.024517663, + -0.06279524, + -0.024217447, + -0.043581396, + -0.0020405324, + 0.009613176, + -0.014698089, + -0.011777234, + -0.013146971, + 0.060293436, + 0.0066610486, + -0.031422637, + -2.5958641e-07, + 0.024267482, + -0.005466438, + -0.020427216, + 0.00328987, + 0.0147731425, + -0.0139725655, + -0.030421916, + 0.0045313896, + -0.01708731, + -0.023016581, + -0.0058166906, + -0.040304035, + 0.0016902802, + -0.0015331358, + 0.0033836877, + -0.053738713, + 0.032223213, + -0.002925545, + 0.0038871753, + 0.06879956, + -0.02784506, + -0.0060043256, + -0.0061982153, + 0.020990122, + 0.023266762, + -0.0074366075, + 0.030046646, + 0.019976892, + 0.008055803, + 0.03304881, + -0.031347584, + 0.010394989, + -0.009907138, + -0.0369266, + -0.026394015, + 0.015361066, + -0.020990122, + -0.04620829, + 0.07900692, + 0.027669935, + 0.019639147, + -0.02329178, + -0.023617014, + -0.009681975, + -0.03530043, + -0.021615572, + -0.025505874, + -0.016849639, + 0.02329178, + -0.02809524, + -0.039428405, + 0.069149815, + -0.009200378, + 0.020990122, + -0.040304035, + -0.018675955, + 0.039428405, + 0.077505834, + -0.09551881, + -0.01797545, + 0.016336769, + -0.025343258, + -0.0009131578, + 0.003586959, + 0.012746682, + 0.027294664, + 0.011014185, + 0.049335543, + -0.059893146, + -0.008318493, + -0.0023423124, + 0.038377646, + -0.0064609046, + -0.031697836, + 0.052587885, + -0.010764005, + -0.004265573, + 0.046758685, + 
-0.030722132, + -0.010588879, + 0.017887887, + -0.017112328, + -0.019701693, + -0.0155737195, + 0.014185219, + -0.012471485, + -0.07105119, + -0.0019373331, + 0.0072302087, + 0.05198745, + 0.011045457, + -0.017637707, + 0.019476531, + 0.068299204, + 0.04745919, + 0.059292715, + 0.030797187, + -0.00052342395, + 0.013096935, + -0.022916509, + 0.013359624, + -0.016249206, + 0.053438496, + 0.0011015749, + -0.051236913, + -0.010445025, + -0.06694823, + 0.0098821195, + 0.022428658, + -0.0102824075, + -0.01669953, + -0.0112831285, + 0.019914346, + -0.031597763, + 0.017500108, + -0.0066860667, + -0.0053851297, + 0.0011523927, + 0.0044688443, + -0.026469069, + -0.0013799004, + 0.012777955, + -0.0054820743, + -0.021027649, + 0.0031553982, + -0.024342537, + -0.03444982, + 0.0110266935, + 0.025793582, + -0.00905027, + 0.04075436, + -0.009681975, + 0.0049660774, + -0.026118817, + -0.0075992243, + -0.071651615, + 0.01960162, + 0.059642967, + 0.015661282, + -0.015898954, + -0.019138787, + 0.026243906, + -0.043231145, + 0.007061337, + 0.0025565294, + -0.02784506, + -0.09401773, + -0.01367235, + -0.009913391, + -0.047108937, + 0.032298267, + 0.05659077, + -0.034124583, + -0.02937116, + -0.033699278, + -0.022578767, + 0.0059417807, + -0.017387526, + 0.02200335, + -0.042080317, + -0.025718529, + -0.021815715, + -0.04658356, + -0.019076243, + 0.020414706, + 0.035400502, + 0.00734279, + -0.04408176, + -0.037251838, + 0.014485436, + -0.009056524, + -0.024642752, + 0.027169574, + 0.0072114454, + 0.045132514, + -0.019413985, + -0.033298988, + -0.018250648, + 0.063695885, + 0.013134462, + -0.004350009, + 0.020051945, + 0.022503711, + -0.024492646, + 0.0010015027, + 0.01045128, + 0.036426242, + -0.07550439, + 0.0036088498, + -0.07570454, + 0.0058385814, + -0.033824366, + -0.06349574, + -0.028020186, + -0.023066618, + 0.025893655, + 0.007799369, + 0.015423612, + -0.041579954, + -0.02090256, + 0.017450072, + -0.0061169066, + 0.00029669813, + -0.014648053, + -0.004934805, + 0.037151765, + 
-0.040679306, + 0.023429379, + -0.00670483, + 0.009325468, + 0.01189607, + -0.013509733, + -0.0053601116, + 0.057691563, + -0.031998053, + 0.015273503, + -0.0006051234, + 0.0041811373, + -0.009256668, + -0.013272061, + -0.014247764, + 0.0037745943, + 0.030321844, + 0.039303314, + -0.053338427, + 0.01786287, + 0.05088666, + 0.006711085, + -0.0016652622, + 0.020064455, + -0.014185219, + 0.015498665, + 0.0042843367, + 0.015110886, + -0.002181259, + 0.044156812, + 0.0033680515, + 0.0022000223, + 0.0064358865, + 0.024955478, + 0.016536914, + 0.0074115894, + 0.019801766, + 0.037877288, + 0.022853965, + -0.122988604, + 0.0036463768, + -0.041029558, + 0.002875509, + 0.033824366, + -0.02494297, + 0.015898954, + 0.011921088, + 0.064946786, + 0.029546285, + -0.016436841, + 0.010376225, + -0.033749312, + 0.0129093, + -0.018625919, + -0.0476093, + 0.0029443086, + -0.017475089, + -0.05013612, + -0.019801766, + -0.010651424, + 0.03557563, + 0.04530764, + 0.014097656, + 0.020965103, + -0.0060981433, + 0.004065429, + 0.0067673754, + -0.014397873, + -0.0137724215, + -0.0017684615, + 0.012208795, + 0.035375483, + -0.045107495, + -0.0148607055, + 0.017112328, + 0.013834967, + 0.0019576603, + 0.011996143, + 0.026619177, + 0.031647798, + -0.019439004, + 0.0061763246, + -0.01253403, + 0.023004072, + -0.021678118, + -0.017925413, + 0.03785227, + 0.0072114454, + 0.003299252, + -0.007855659, + 0.0073052626, + 0.0075491886, + 0.010601387, + 0.00923165, + -0.0067486116, + 0.008205912, + -0.008062058, + -0.00064734137, + -0.00012714238, + -0.013259552, + 0.0012532466, + 0.018163085, + 0.015498665, + 0.023504432, + -0.008305984, + -0.0627452, + -0.0028473637, + 0.030572025, + -0.012759192, + 0.009069033, + -0.025618456, + -0.017287454, + 0.00809333, + 0.023854686, + 0.015323539, + 0.014748124, + -0.001452609, + 0.0052350215, + -0.044106774, + 0.004709643, + 0.0034524873, + -0.0022172222, + 0.001502645, + -0.03254845, + -0.003027181, + -0.01847581, + 0.025168132, + -0.021077685, + -0.017575162, 
+ -0.0105388425, + 0.012265086, + 0.0025471475, + 0.008631218, + -0.023066618, + 0.01644935, + 0.059592932, + -0.013297079, + 0.03149769, + 0.018125558, + -0.042980965, + 0.0014229001, + -0.0048253513, + -0.017687742, + 0.068299204, + 0.026769284, + -0.024630243, + -0.023829667, + -0.027369717, + -0.008399801, + -0.007699297, + -0.0088251075, + -0.0072114454, + 0.013947548, + 0.005491456, + 0.025280712, + -0.02252873, + 0.05834203, + -0.009275432, + -0.0035494321, + -0.00068369566, + 0.008218421, + -0.012802973, + -0.018325701, + -0.0043343725, + 0.012258831, + 0.0034556144, + -0.00091237604, + 0.034099564, + 0.020915067, + 0.0011907015, + 0.00039696565, + 0.011514545, + 0.017675234, + -0.0049129142, + -0.016599458, + -0.0015972444, + -0.04570793, + 0.035750754, + 0.013322097, + -0.01960162, + -0.03887801, + 0.00956314, + 0.08015775, + 0.025718529, + 0.02999661, + 0.010976657, + 0.030146718, + 0.012884282, + 0.016311752, + -0.0011500473, + -0.015711319, + 0.0070988643, + 0.044532083, + -0.0039372114, + -0.028295385, + -0.018813554, + -0.024355046, + -0.030972313, + 0.0060762526, + 0.014247764, + 0.019476531, + -0.012527775, + -0.0035775774, + -0.01606157, + 0.05073655, + 0.020202054, + -0.022691347, + 4.3830405e-05, + 0.0034368508, + -0.041229703, + 0.006404614, + -0.06419625, + 0.0021359138, + 0.022503711, + 0.008806344, + -0.07290252, + 0.004118592, + -0.009044016, + -0.013059408, + -0.036801513, + 0.03735191, + -0.00091706694, + 0.004981714, + -0.024204938, + -0.012333886, + 0.014497944, + -0.011583345, + -0.0516372, + -0.021540519, + 0.015798882, + 0.0013126644, + 0.00924416, + 0.017950432, + 0.013497223, + 0.019576604, + -0.0065109404, + -0.013784931, + 0.028245348, + 0.019226352, + -0.02254124, + -0.0062107244, + 0.040203962, + -0.002381403, + -0.030471953, + -0.0075867157, + 0.050411317, + -0.028320402, + -0.005729127, + -0.027820041, + 0.0051412038, + -0.009100306, + -0.011514545, + -0.00617007, + -0.008868889, + 0.005453929, + -0.014510454, + 0.009513103, 
+ 0.0057134912, + 0.0125715565, + -0.014910742, + -0.044732224, + -0.037802234, + -0.010420007, + 0.009388013, + 0.006592249, + -0.004443826, + -0.0133721335, + -0.072452195, + -0.016499387, + -0.03274859, + 0.006326433, + 0.008568673, + 0.032873683, + -0.00082090386, + 0.033499133, + 0.047759406, + 0.018901117, + 0.010307426, + 0.015135904, + 0.006173197, + -0.024505153, + 0.00044993352, + -0.009094051, + -0.02684434, + 0.0030396897, + -0.026569141, + 0.028395457, + -0.032223213, + 0.006132543, + -0.0054820743, + 0.006254506, + -0.025493365, + 0.013559769, + -0.013209516, + -0.007542934, + -0.020001909, + -0.055139724, + 0.0135722775, + -0.00551022, + -0.022678837, + 0.008487364, + 0.03857779, + -0.009075288, + -0.03785227, + 0.009544376, + -0.038402665, + 0.02329178, + -0.02037718, + -0.011677163, + -0.016399315, + 0.05759149, + -0.006611013, + 0.02139041, + -0.0058166906, + 0.008599945, + -0.013847476, + 0.026669214, + -0.016511895, + -0.018713482, + -0.022291059, + 0.012246323, + -0.02100263, + -0.014923251, + 0.00037097037, + -0.003085035, + -0.014885724, + 0.025918672, + 0.0057197455, + -0.028170295, + 0.024480136, + -0.007530425, + -0.005181858, + -0.019038716, + -0.048509948, + 0.016136626, + 0.025243185, + -0.01973922, + 0.0036119772, + -0.011470764, + 0.01569881, + 0.0058073085, + -0.01621168, + -0.022616293, + -0.011633381, + 0.01632426, + 0.023617014, + -0.001389282, + -0.018713482, + 0.023867194, + -0.0011993014, + -0.024892934, + -0.00021656226, + 0.012996863, + 0.031847943, + -0.026544122, + -0.027394736, + -0.012746682, + -0.01340966, + 0.0213779, + 0.022453675, + -0.019188823, + -0.01043877, + -0.018638426, + 0.003977866, + 0.017187381, + -0.015198449, + -0.0020358416, + -0.021290338, + -0.019964382, + -0.03592588, + -0.022090914, + -0.024367554, + 0.007780605, + 0.023829667, + -0.0014135183, + 0.031147439, + -0.017362509, + 0.03862783, + 0.016924692, + 0.03189798, + 0.006598504, + -0.05909257, + -0.04403172, + -0.002888018, + -0.0033680515, + 
-0.014635543, + 0.013434678, + -0.026343979, + -0.0012985917, + 0.0007356862, + 0.004628334, + 0.02558093, + 0.027169574, + 0.0065359585, + -0.005960544, + 0.013484715, + -0.0106264055, + 0.009350486, + -0.037301872, + 0.060593653, + 0.031722855, + 0.011508291, + 0.00011013794, + 0.042830855, + 0.022190986, + 0.0033586696, + -0.053588606, + -0.02455519, + -0.0046439706, + 0.015110886, + -0.007961986, + -0.010764005, + 0.025893655, + -0.022378622, + 0.0045720437, + 0.02594369, + 0.02847051, + -0.015235976, + -0.011458254, + 0.02036467, + -0.023479415, + 0.005206876, + -0.008299729, + -0.026469069, + -0.019989401, + -0.009200378, + 0.0048441147, + 0.0067861388, + -0.013747403, + 0.015748845, + 0.0006074689, + -0.019251369, + -0.0018450792, + 0.0098821195, + -0.03757707, + 0.015611246, + -0.007924459, + 0.0038652846, + -0.01733749, + -0.020026928, + 0.021703135, + 0.00028399366, + -0.0036651404, + -0.013634822, + -0.022065897, + -0.027419753, + 0.043206125, + -0.021177758, + 0.028770726, + 0.00017737388, + -0.013109445, + 0.02681932, + -0.013997584, + -0.011158038, + 0.025280712, + -0.023054108, + 0.01810054, + -0.033874404, + -0.013759913, + 0.008618709, + 0.040579233, + -0.04465717, + 0.046508506, + -0.01911377, + 0.014135183, + 0.0043468815, + -0.0359509, + 0.01644935, + -0.011933597, + -0.0005199058, + 0.004190519, + -0.00759297, + 0.02696943, + 0.041429847, + -0.012365158, + -0.01669953, + 0.012959336, + 0.044857316, + 0.0015268812, + 0.036376204, + -0.012946827, + 0.015748845, + -0.019901838, + 0.0110829845, + 0.013021881, + -0.01043877, + -0.010463788, + 0.0014979541, + 0.0066547943, + 0.0359509, + 0.002905218, + -0.046858758, + 0.04368147, + 0.0503863, + 0.04720901, + 0.041304756, + 0.031147439, + 0.027920114, + 0.005563383, + 0.049285505, + 0.017274946, + -0.024792861, + -0.010707714, + 0.00923165, + -0.0105388425, + 0.020802487, + -0.0060981433, + 0.008368528, + 0.0019388968, + -0.026368996, + 0.013522241, + 0.043581396, + -0.03177289, + 0.0017528252, + 
-0.027920114, + -0.00689872, + -0.034099564, + -0.032448377, + -0.002855182, + 0.04745919, + -0.032698557, + -0.013284571, + -0.01923886, + 0.014598017, + 0.0126278475, + -0.048635036, + 0.003077217, + -0.0015925536, + 0.0033711786, + -0.0044188085, + 0.010101027, + 0.034524873, + 0.012077451, + 0.0031835434, + -0.0033430334, + -0.007255227, + 0.019589113, + -0.004328118, + 0.045007423, + -0.024605226, + 0.04090447, + -0.015974008, + -0.00087719446, + 0.006861193, + 0.027419753, + 0.028445492, + -0.006442141, + 0.015723828, + -0.0028645636, + -0.0148607055, + -0.015811391, + 0.028195312, + 0.005872981, + 0.015548701, + 0.02316669, + 0.008030785, + -0.0258186, + -0.01543612, + 0.01783785, + 0.009769538, + 0.043831576, + 0.012659119, + -0.0141727105, + -0.008074567, + -0.015398594, + -0.036025953, + -0.0058667264, + 0.0025299476, + 9.230283e-05, + 0.002695692, + -0.020189544, + 0.009419286, + 0.002695692, + 0.016511895, + -0.010914112, + -0.051061787, + -0.015473647, + -0.01506085, + -0.0038215031, + 0.02784506, + -0.016149133, + 0.0052475305, + 0.023967266, + -0.004859751, + -0.0036495042, + -0.007392826, + 0.0023532577, + -0.032573465, + -0.0242925, + 0.0034087056, + -0.003621359, + -0.011602108, + -0.0027598008, + 0.001540172, + 0.013484715, + 0.0022719493, + -0.014160201, + 0.008675, + 0.0102824075, + 0.020477252, + -0.014948268, + -0.0031991797, + -0.002121841, + 0.019889329, + -0.0007548407, + -0.022891492, + 0.018263157, + -0.0015902082, + 0.0062388694, + 0.015073359, + -0.0062982873, + -0.016949711, + -0.004265573, + 0.017112328, + 0.00057267817, + -0.030346863, + 0.0018904244, + -0.0100635, + -0.004972332, + -0.010107282, + -0.0061294157, + 0.027995167, + 0.007693042, + -0.012540285, + -0.004697134, + 0.042205404, + -0.015298521, + 0.0015761355, + 0.010857822, + -0.017249927, + -0.016399315, + 0.001469027, + -0.03762711, + -0.04152992, + 0.027244627, + 0.0004737788, + -0.017637707, + 0.017700251, + -0.016399315, + 0.00094443036, + -0.02100263, + 0.009832083, 
+ 0.030446934, + -0.021765681, + -0.00040615196, + -0.019226352, + -0.028145276, + -0.03467498, + 0.012102469, + 0.0062763966, + 0.0033555424, + 0.014535472, + -0.006107525, + 0.017562652, + -0.010939131, + -0.03820252, + 0.039328333, + 0.01100793, + -0.012996863, + 0.061844554, + 0.022929018, + -0.0012149378, + -0.0062513785, + -0.011039203, + 0.0025596565, + 0.016987238, + -0.004537644, + -0.008975216, + 0.019313915, + 0.001059357, + 0.006698576, + 0.017437562, + -0.0131719895, + 0.008268457, + -0.022941528, + 0.009975937, + 0.02518064, + -0.015898954, + -0.029971592, + -0.008143366, + -0.0027707461, + 0.024993006, + 0.009119069, + -0.016787093, + -0.025243185, + -0.005973053, + 0.012171268, + 0.004772188, + -0.0012845191, + -0.0002779346, + -0.029020907, + -0.034249675, + -0.017387526, + 0.02022707, + 0.007455371, + -0.00024451208, + -0.029896537, + 0.011039203, + -0.014022602, + 0.027569862, + 0.028445492, + -0.025505874, + -0.017550142, + -0.0046564797, + 0.038552772, + -0.029846502, + -0.011001675, + -0.010401243, + 0.011477018, + -0.0045845527, + 0.021352883, + 0.02505555, + 0.011495782, + -0.013522241, + -0.047784425, + 0.019476531, + -0.004350009, + 0.009794557, + 0.004909787, + -0.018075522, + -0.0213779, + -0.009006488, + -0.023516942, + 0.011408218, + -0.0427558, + -0.048635036, + 0.025593437, + 0.00172468, + 0.00023591214, + -0.010294916, + 0.0063639595, + 0.025843618, + -0.033073828, + 0.0006019962, + 0.024054829, + 0.028045204, + 0.046658613, + 0.0037996122, + -0.050561424, + -0.005034877, + 0.014422891, + 0.008787581, + 0.0016042808, + 0.0012337012, + 0.015286013, + 0.0021937678, + -0.0032210704, + 0.038052414, + 0.026168853, + 0.026168853, + -0.01481067, + -0.015661282, + -0.005256912, + -0.025480857, + -0.0029396177, + 0.0010820295, + 0.004794079, + -0.035000216, + -0.026444051, + 0.008818854, + 0.006973774, + 0.012821737, + 0.006054362, + -0.024655262, + -0.0069049746, + -0.0029396177, + -0.007936968, + 0.03404953, + 0.010545096, + -0.011433236, 
+ -0.00061763247, + 0.0059292717, + -0.052487813, + -0.008531146, + -0.009656957, + -0.02266633, + -0.022828946, + -0.002930236, + 0.015673792, + -0.0046658614, + -0.011739708, + 0.01619917, + -0.015035832, + 0.02594369, + -0.007868168, + 0.022065897, + 0.04110461, + -0.014623035, + -0.012602829, + 0.003349288, + -0.016349278, + 0.0027598008, + -0.00092723046, + -0.030797187, + -0.004503244, + -0.0242925, + 0.008174639, + 0.021452954, + -0.007555443, + -0.00446259, + 0.035275415, + -0.0029896537, + 0.011133021, + 0.027644916, + -0.008637473, + -0.023429379, + -0.0021859498, + 0.011289383, + -0.0007571861, + -0.026394015, + 0.024880424, + -0.0071926815, + -0.03377433, + 0.040429126, + 0.03667642, + 0.011733453, + -0.009388013, + 0.009075288, + -0.015361066, + -0.0035150324, + -0.0095381215, + -0.015085868, + 0.0043844087, + 0.023004072, + -0.027669935, + 0.0172124, + 0.000673923, + -0.0057134912, + 0.019313915, + -0.002875509, + 0.016636986, + -0.016286733, + -0.0142978, + -0.0034431054, + -0.012665374, + -0.030547006, + -0.02075245, + -0.0046596066, + -0.019188823, + -7.246431e-05, + -0.0013908457, + -0.011014185, + 0.004834733, + 0.038127467, + 0.0057322546, + -0.015498665, + 0.003977866, + 0.04668363, + -0.012252577, + 0.023341816, + -0.0011164293, + 0.0050036046, + 0.023504432, + -0.0035025233, + 0.019026207, + -0.013397152, + 0.0054414202, + -0.029496249, + -0.016549421, + -0.016024044, + -0.010576369, + -0.009738266, + -0.010244881, + 0.022378622, + -0.0016433714, + -0.011376946, + 0.028445492, + -0.0184633, + -0.00471277, + -0.020990122, + 0.0068549383, + -0.0024001666, + 0.022378622, + 0.015336048, + 0.01619917, + -0.026143834, + 0.0498359, + 0.0054132747, + 0.02379214, + -0.017012255, + -0.049810883, + -0.021615572, + -0.0054101474, + 0.0063764686, + 0.005106804, + 0.022266041, + 0.010989167, + -0.0052850572, + -0.025318239, + -0.009400522, + 0.0071864272, + 0.028195312, + -0.02505555, + 0.022053387, + 0.0023876575, + 0.009688229, + -0.03444982, + 
0.019013697, + -0.0045814253, + 0.009769538, + -0.034599926, + 0.009957173, + -0.023379343, + -0.0041592466, + -0.011014185, + 0.026644194, + 0.011301892, + -0.023316797, + 0.012734174, + -0.0140851475, + 0.013272061, + 0.00042530638, + -0.03760209, + -0.021077685, + -0.019926855, + 0.010088518, + -0.011539564, + -0.0100259725, + -0.042455584, + -0.021815715, + 0.00029865265, + -0.03960353, + 0.0044719717, + 0.0027738733, + -0.006611013, + 0.0013986639, + 0.008518637, + -0.011852289, + 0.036776494, + -0.0030084173, + 0.0019858056, + -0.013872494, + -0.019789256, + -0.014035111, + -0.0049785865, + -0.0005969144, + -0.0023454397, + 0.028645637, + -0.001619917, + -0.036126025, + 0.010995422, + 0.023591995, + -0.054088965, + 0.0063514505, + -0.02468028, + 0.022253532, + 0.010457533, + 0.00019721239, + -0.024455117, + -0.033098843, + 0.036751475, + -0.01607408, + 0.0026268924, + -0.033899423, + -0.007355299, + -0.014185219, + -0.015773864, + -0.004728406, + 0.009488085, + 0.021415427, + 0.03847772, + -0.044356953, + 0.031047367, + 0.009732011, + 0.038402665, + 0.011989888, + 0.011452001, + 0.0034399782, + 0.017575162, + 0.01975173, + -0.02504304, + 0.00011160384, + 0.0059261443, + -0.0026253287, + -0.04430692, + 0.038652845, + 0.026644194, + -0.0102824075, + 0.021690626, + -0.015798882, + -0.020139508, + -0.012033669, + 0.004806588, + -0.063295595, + 0.0036495042, + 0.010982912, + -0.020602342, + 0.0027363463, + 0.033699278, + 0.0074866433, + 0.03820252, + -0.015135904, + -0.0066673034, + -0.046858758, + 0.012458975, + -0.0012243196, + 0.009169105, + -0.0028567456, + -0.0049410597, + 0.017037274, + -0.023529451, + 0.017700251, + -0.01708731, + 0.0244301, + 0.02962134, + -0.013684859, + 0.035500575, + 0.043506343, + -0.0036276134, + 0.022753892, + 0.0038934299, + -0.0014002275, + -0.005622801, + 0.025218168, + -0.0040497924, + 0.017750287, + -0.022416148, + -0.003172598, + 0.010213608, + 9.0836926e-05, + 0.004884769, + -0.013534751, + -0.010663932, + -0.0033461605, + 
-0.01431031, + -0.017074801, + 0.0045908075, + 0.03407455, + -0.0054883286, + 0.009694484, + 0.018175595, + -0.012515266, + -0.0001456127, + -0.0044719717, + 0.0029615085, + 0.008556164, + 0.017887887, + -0.027744988, + -0.0035588138, + -0.010782768, + 0.021165248, + -0.05634059, + 0.0041404827, + 0.019639147, + 0.022178477, + -0.03645126, + 0.0048128422, + -0.006132543 +] \ No newline at end of file diff --git a/sdk/search/azure-search-documents/samples/synonym_map.txt b/sdk/search/azure-search-documents/samples/data/synonym_map.txt similarity index 100% rename from sdk/search/azure-search-documents/samples/synonym_map.txt rename to sdk/search/azure-search-documents/samples/data/synonym_map.txt diff --git a/sdk/search/azure-search-documents/samples/files/hotel_small.json b/sdk/search/azure-search-documents/samples/files/hotel_small.json deleted file mode 100644 index e30a1f96199b..000000000000 --- a/sdk/search/azure-search-documents/samples/files/hotel_small.json +++ /dev/null @@ -1,252 +0,0 @@ -[ - { - "@search.action": "upload", - "hotelId": "1", - "hotelName": "Fancy Stay", - "description": "Best hotel in town if you like luxury hotels. They have an amazing infinity pool, a spa, and a really helpful concierge. The location is perfect -- right downtown, close to all the tourist attractions. We highly recommend this hotel.", - "descriptionFr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe. Ils ont une magnifique piscine Ć  dĆ©bordement, un spa et un concierge trĆØs utile. L'emplacement est parfait – en plein centre, Ć  proximitĆ© de toutes les attractions touristiques. 
Nous recommandons fortement cet hĆ“tel.", - "category": "Luxury", - "tags": [ - "pool", - "view", - "wifi", - "concierge" - ], - "parkingIncluded": false, - "smokingAllowed": false, - "lastRenovationDate": "2010-06-27T00:00:00+00:00", - "rating": 5, - "location": { - "type": "Point", - "coordinates": [ - -122.131577, - 47.678581 - ] - } - }, - { - "@search.action": "upload", - "hotelId": "2", - "hotelName": "Roach Motel", - "description": "Cheapest hotel in town. Infact, a motel.", - "descriptionFr": "HĆ“tel le moins cher en ville. Infact, un motel.", - "category": "Budget", - "tags": [ - "motel", - "budget" - ], - "parkingIncluded": true, - "smokingAllowed": true, - "lastRenovationDate": "1982-04-28T00:00:00+00:00", - "rating": 1, - "location": { - "type": "Point", - "coordinates": [ - -122.131577, - 49.678581 - ] - } - }, - { - "@search.action": "upload", - "hotelId": "3", - "hotelName": "EconoStay", - "description": "Very popular hotel in town", - "descriptionFr": "HĆ“tel le plus populaire en ville", - "category": "Budget", - "tags": [ - "wifi", - "budget" - ], - "parkingIncluded": true, - "smokingAllowed": false, - "lastRenovationDate": "1995-07-01T00:00:00+00:00", - "rating": 4, - "location": { - "type": "Point", - "coordinates": [ - -122.131577, - 46.678581 - ] - } - }, - { - "@search.action": "upload", - "hotelId": "4", - "hotelName": "Express Rooms", - "description": "Pretty good hotel", - "descriptionFr": "Assez bon hĆ“tel", - "category": "Budget", - "tags": [ - "wifi", - "budget" - ], - "parkingIncluded": true, - "smokingAllowed": false, - "lastRenovationDate": "1995-07-01T00:00:00+00:00", - "rating": 4, - "location": { - "type": "Point", - "coordinates": [ - -122.131577, - 48.678581 - ] - } - }, - { - "@search.action": "upload", - "hotelId": "5", - "hotelName": "Comfy Place", - "description": "Another good hotel", - "descriptionFr": "Un autre bon hĆ“tel", - "category": "Budget", - "tags": [ - "wifi", - "budget" - ], - "parkingIncluded": true, - 
"smokingAllowed": false, - "lastRenovationDate": "2012-08-12T00:00:00+00:00", - "rating": 4, - "location": { - "type": "Point", - "coordinates": [ - -122.131577, - 48.678581 - ] - } - }, - { - "@search.action": "upload", - "hotelId": "6", - "description": "Surprisingly expensive. Model suites have an ocean-view." - }, - { - "@search.action": "upload", - "hotelId": "7", - "hotelName": "Modern Stay", - "description": "Modern architecture, very polite staff and very clean. Also very affordable.", - "descriptionFr": "Architecture moderne, personnel poli et trĆØs propre. Aussi trĆØs abordable." - }, - { - "@search.action": "upload", - "hotelId": "8", - "description": "Has some road noise and is next to the very police station. Bathrooms had morel coverings.", - "descriptionFr": "Il y a du bruit de la route et se trouve Ć  cĆ“tĆ© de la station de police. Les salles de bain avaient des revĆŖtements de morilles." - }, - { - "@search.action": "upload", - "hotelId": "9", - "hotelName": "Secret Point Motel", - "description": "The hotel is ideally located on the main commercial artery of the city in the heart of New York. A few minutes away is Time's Square and the historic centre of the city, as well as other places of interest that make New York one of America's most attractive and cosmopolitan cities.", - "descriptionFr": "L'hĆ“tel est idĆ©alement situĆ© sur la principale artĆØre commerciale de la ville en plein cœur de New York. 
A quelques minutes se trouve la place du temps et le centre historique de la ville, ainsi que d'autres lieux d'intĆ©rĆŖt qui font de New York l'une des villes les plus attractives et cosmopolites de l'AmĆ©rique.", - "category": "Boutique", - "tags": [ - "pool", - "air conditioning", - "concierge" - ], - "parkingIncluded": false, - "smokingAllowed": true, - "lastRenovationDate": "1970-01-18T00:00:00-05:00", - "rating": 4, - "location": { - "type": "Point", - "coordinates": [ - -73.975403, - 40.760586 - ] - }, - "address": { - "streetAddress": "677 5th Ave", - "city": "New York", - "stateProvince": "NY", - "country": "USA", - "postalCode": "10022" - }, - "rooms": [ - { - "description": "Budget Room, 1 Queen Bed (Cityside)", - "descriptionFr": "Chambre Ɖconomique, 1 grand lit (cĆ“tĆ© ville)", - "type": "Budget Room", - "baseRate": 9.69, - "bedOptions": "1 Queen Bed", - "sleepsCount": 2, - "smokingAllowed": true, - "tags": [ - "vcr/dvd" - ] - }, - { - "description": "Budget Room, 1 King Bed (Mountain View)", - "descriptionFr": "Chambre Ɖconomique, 1 trĆØs grand lit (Mountain View)", - "type": "Budget Room", - "baseRate": 8.09, - "bedOptions": "1 King Bed", - "sleepsCount": 2, - "smokingAllowed": true, - "tags": [ - "vcr/dvd", - "jacuzzi tub" - ] - } - ] - }, - { - "@search.action": "upload", - "hotelId": "10", - "hotelName": "Countryside Hotel", - "description": "Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.", - "descriptionFr": "Ɖconomisez jusqu'Ć  50% sur les hĆ“tels traditionnels. 
WiFi gratuit, trĆØs bien situĆ© prĆØs du centre-ville, cuisine complĆØte, laveuse & sĆ©cheuse, support 24/7, bowling, centre de fitness et plus encore.", - "category": "Budget", - "tags": [ - "24-hour front desk service", - "coffee in lobby", - "restaurant" - ], - "parkingIncluded": false, - "smokingAllowed": true, - "lastRenovationDate": "1999-09-06T00:00:00+00:00", - "rating": 3, - "location": { - "type": "Point", - "coordinates": [ - -78.940483, - 35.90416 - ] - }, - "address": { - "streetAddress": "6910 Fayetteville Rd", - "city": "Durham", - "stateProvince": "NC", - "country": "USA", - "postalCode": "27713" - }, - "rooms": [ - { - "description": "Suite, 1 King Bed (Amenities)", - "descriptionFr": "Suite, 1 trĆØs grand lit (Services)", - "type": "Suite", - "baseRate": 2.44, - "bedOptions": "1 King Bed", - "sleepsCount": 2, - "smokingAllowed": true, - "tags": [ - "coffee maker" - ] - }, - { - "description": "Budget Room, 1 Queen Bed (Amenities)", - "descriptionFr": "Chambre Ɖconomique, 1 grand lit (Services)", - "type": "Budget Room", - "baseRate": 7.69, - "bedOptions": "1 Queen Bed", - "sleepsCount": 2, - "smokingAllowed": false, - "tags": [ - "coffee maker" - ] - } - ] - } -] diff --git a/sdk/search/azure-search-documents/samples/sample_agentic_retrieval.py b/sdk/search/azure-search-documents/samples/sample_agentic_retrieval.py new file mode 100644 index 000000000000..45177246e5a5 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_agentic_retrieval.py @@ -0,0 +1,202 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates Knowledge Source and Knowledge Base CRUD operations and + a minimal retrieval query using a semantic intent. 
+ +USAGE: + python sample_agentic_retrieval.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import json +import os + +from azure.core.credentials import AzureKeyCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.search.documents.indexes import SearchIndexClient +from azure.search.documents.knowledgebases import KnowledgeBaseRetrievalClient +from azure.search.documents.knowledgebases.models import ( + KnowledgeBaseRetrievalRequest, + KnowledgeRetrievalSemanticIntent, +) +from azure.search.documents.indexes.models import ( + KnowledgeBase, + KnowledgeSourceReference, + KnowledgeRetrievalMinimalReasoningEffort, + SearchIndexFieldReference, + SearchIndexKnowledgeSource, + SearchIndexKnowledgeSourceParameters, +) + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + +knowledge_source_name = "hotels-sample-knowledge-source" +knowledge_base_name = "hotels-sample-knowledge-base" + + +def create_knowledge_source(): + # [START create_knowledge_source] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_source = SearchIndexKnowledgeSource( + name=knowledge_source_name, + search_index_parameters=SearchIndexKnowledgeSourceParameters( + search_index_name=index_name + ), + ) + + index_client.create_or_update_knowledge_source(knowledge_source=knowledge_source) + print(f"Created: knowledge source '{knowledge_source_name}'") + # [END create_knowledge_source] + + +def get_knowledge_source(): + # [START get_knowledge_source] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + 
knowledge_source = index_client.get_knowledge_source(knowledge_source_name) + print(f"Retrieved: knowledge source '{knowledge_source.name}'") + # [END get_knowledge_source] + + +def update_knowledge_source(): + # [START update_knowledge_source] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_source = SearchIndexKnowledgeSource( + name=knowledge_source_name, + search_index_parameters=SearchIndexKnowledgeSourceParameters( + search_index_name=index_name, + source_data_fields=[ + SearchIndexFieldReference(name="HotelId"), + SearchIndexFieldReference(name="HotelName"), + SearchIndexFieldReference(name="Description"), + SearchIndexFieldReference(name="Category"), + SearchIndexFieldReference(name="Tags"), + ], + ), + ) + + index_client.create_or_update_knowledge_source(knowledge_source=knowledge_source) + print(f"Updated: knowledge source '{knowledge_source_name}'") + # [END update_knowledge_source] + + +def delete_knowledge_source(): + # [START delete_knowledge_source] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + try: + index_client.delete_knowledge_source(knowledge_source_name) + print(f"Deleted: knowledge source '{knowledge_source_name}'") + except ResourceNotFoundError: + print(f"Skipped: knowledge source '{knowledge_source_name}' not found") + # [END delete_knowledge_source] + + +def create_knowledge_base(): + # [START create_knowledge_base] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_base = KnowledgeBase( + name=knowledge_base_name, + knowledge_sources=[KnowledgeSourceReference(name=knowledge_source_name)], + ) + + index_client.create_or_update_knowledge_base(knowledge_base) + print(f"Created: knowledge base '{knowledge_base_name}'") + # [END create_knowledge_base] + + +def get_knowledge_base(): + # [START get_knowledge_base] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_base = 
index_client.get_knowledge_base(knowledge_base_name) + print(f"Retrieved: knowledge base '{knowledge_base.name}'") + # [END get_knowledge_base] + + +def update_knowledge_base(): + # [START update_knowledge_base] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + knowledge_base = KnowledgeBase( + name=knowledge_base_name, + knowledge_sources=[KnowledgeSourceReference(name=knowledge_source_name)], + retrieval_reasoning_effort=KnowledgeRetrievalMinimalReasoningEffort(), + ) + + index_client.create_or_update_knowledge_base(knowledge_base) + print(f"Updated: knowledge base '{knowledge_base_name}'") + # [END update_knowledge_base] + + +def retrieve_knowledge_base(): + # [START retrieve_knowledge_base] + retrieval_client = KnowledgeBaseRetrievalClient( + service_endpoint, + knowledge_base_name=knowledge_base_name, + credential=AzureKeyCredential(key), + ) + + request = KnowledgeBaseRetrievalRequest( + intents=[KnowledgeRetrievalSemanticIntent(search="hotels with free wifi")] + ) + + result = retrieval_client.retrieve(request) + print("Results: knowledge base retrieval") + + response_parts = [] + for resp in result.response or []: + for content in resp.content or []: + if hasattr(content, "text"): + response_parts.append(content.text) + + if response_parts: + response_content = "\n\n".join(response_parts) + + items = json.loads(response_content) + for i, item in enumerate(items[:5], start=1): + print(f" Result {i}:") + print(f" Title: {item.get('title')}") + print(f" Content: {item.get('content')}") + else: + print("Results: none") + # [END retrieve_knowledge_base] + + +def delete_knowledge_base(): + # [START delete_knowledge_base] + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + try: + index_client.delete_knowledge_base(knowledge_base_name) + print(f"Deleted: knowledge base '{knowledge_base_name}'") + except ResourceNotFoundError: + print(f"Skipped: knowledge base '{knowledge_base_name}' not found") + # [END 
delete_knowledge_base] + + +if __name__ == "__main__": + create_knowledge_source() + get_knowledge_source() + update_knowledge_source() + create_knowledge_base() + get_knowledge_base() + update_knowledge_base() + retrieve_knowledge_base() + delete_knowledge_base() + delete_knowledge_source() diff --git a/sdk/search/azure-search-documents/samples/sample_analyze_text.py b/sdk/search/azure-search-documents/samples/sample_analyze_text.py deleted file mode 100644 index 3a239870ea17..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_analyze_text.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_analyze_text.py -DESCRIPTION: - This sample demonstrates how to analyze text. -USAGE: - python sample_analyze_text.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def simple_analyze_text(): - # [START simple_analyze_text] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents.indexes import SearchIndexClient - from azure.search.documents.indexes.models import AnalyzeTextOptions - - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - analyze_request = AnalyzeTextOptions(text="One's ", analyzer_name="standard.lucene") - - result = client.analyze_text(index_name, analyze_request) - print(result.as_dict()) - # [END simple_analyze_text] - - -if __name__ == "__main__": - simple_analyze_text() diff --git a/sdk/search/azure-search-documents/samples/sample_authentication.py b/sdk/search/azure-search-documents/samples/sample_authentication.py index f690c7ff7e17..90a29836d859 100644 --- a/sdk/search/azure-search-documents/samples/sample_authentication.py +++ b/sdk/search/azure-search-documents/samples/sample_authentication.py @@ -1,31 +1,29 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_authentication.py DESCRIPTION: - This sample demonstrates how to authenticate with the Azure Congnitive Search - service with an API key. See more details about authentication here: - https://learn.microsoft.com/azure.search.documents/search-security-api-keys + Demonstrates how to authenticate with the Azure AI Search service. 
+ USAGE: python sample_authentication.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service """ import os -def authentication_with_api_key_credential(): - # [START create_search_client_with_key] +def authenticate_search_client_with_api_key(): + # [START authenticate_search_client_with_api_key] from azure.core.credentials import AzureKeyCredential from azure.search.documents import SearchClient @@ -34,27 +32,31 @@ def authentication_with_api_key_credential(): key = os.environ["AZURE_SEARCH_API_KEY"] search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - # [END create_search_client_with_key] + # [END authenticate_search_client_with_api_key] - result = search_client.get_document_count() + document_count = search_client.get_document_count() - print("There are {} documents in the {} search index.".format(result, index_name)) + print(f"Document count: {document_count} (index '{index_name}')") -def authentication_service_client_with_api_key_credential(): - # [START create_search_service_client_with_key] +def authenticate_index_client_with_api_key(): + # [START authenticate_index_client_with_api_key] from azure.core.credentials import AzureKeyCredential from azure.search.documents.indexes import SearchIndexClient service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] key = os.environ["AZURE_SEARCH_API_KEY"] - search_client = 
SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - # [END create_search_service_client_with_key] + search_index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + # [END authenticate_index_client_with_api_key] + + result = search_index_client.list_indexes() + names = [x.name for x in result] + print(f"Indexes ({len(names)}): {', '.join(names)}") -def authentication_with_aad(): - # [START authentication_with_aad] +def authenticate_search_client_with_aad(): + # [START authenticate_search_client_with_aad] from azure.identity import DefaultAzureCredential from azure.search.documents import SearchClient @@ -63,27 +65,31 @@ def authentication_with_aad(): credential = DefaultAzureCredential() search_client = SearchClient(service_endpoint, index_name, credential) - # [END authentication_with_aad] + # [END authenticate_search_client_with_aad] - result = search_client.get_document_count() + document_count = search_client.get_document_count() - print("There are {} documents in the {} search index.".format(result, index_name)) + print(f"Document count: {document_count} (index '{index_name}')") -def authentication_service_client_with_aad(): - # [START authentication_service_client_with_aad] +def authenticate_index_client_with_aad(): + # [START authenticate_index_client_with_aad] from azure.identity import DefaultAzureCredential from azure.search.documents.indexes import SearchIndexClient service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] credential = DefaultAzureCredential() - search_client = SearchIndexClient(service_endpoint, credential) - # [END authentication_service_client_with_aad] + search_index_client = SearchIndexClient(service_endpoint, credential) + # [END authenticate_index_client_with_aad] + + result = search_index_client.list_indexes() + names = [x.name for x in result] + print(f"Indexes ({len(names)}): {', '.join(names)}") if __name__ == "__main__": - authentication_with_api_key_credential() - 
authentication_service_client_with_api_key_credential() - authentication_with_aad() - authentication_service_client_with_aad() + authenticate_search_client_with_api_key() + authenticate_index_client_with_api_key() + authenticate_search_client_with_aad() + authenticate_index_client_with_aad() diff --git a/sdk/search/azure-search-documents/samples/sample_buffered_sender.py b/sdk/search/azure-search-documents/samples/sample_buffered_sender.py deleted file mode 100644 index db7de92154a2..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_buffered_sender.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_batch_client.py -DESCRIPTION: - This sample demonstrates how to upload, merge, or delete documents using SearchIndexingBufferedSender. -USAGE: - python sample_batch_client.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents import SearchIndexingBufferedSender - - -def sample_batching_client(): - DOCUMENT = { - "category": "Hotel", - "hotelId": "1000", - "rating": 4.0, - "rooms": [], - "hotelName": "Azure Inn", - } - - with SearchIndexingBufferedSender(service_endpoint, index_name, AzureKeyCredential(key)) as batch_client: - # add upload actions - batch_client.upload_documents(documents=[DOCUMENT]) - # add merge actions - batch_client.merge_documents(documents=[{"hotelId": "1000", "rating": 4.5}]) - # add delete actions - batch_client.delete_documents(documents=[{"hotelId": "1000"}]) - - -if __name__ == "__main__": - sample_batching_client() diff --git a/sdk/search/azure-search-documents/samples/sample_crud_operations.py b/sdk/search/azure-search-documents/samples/sample_crud_operations.py deleted file mode 100644 index de22c6b5cb82..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_crud_operations.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_crud_operations.py -DESCRIPTION: - This sample demonstrates how to upload, merge, or delete documents from an - Azure Search index. 
-USAGE: - python sample_crud_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents import SearchClient - -search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - -def upload_document(): - # [START upload_document] - DOCUMENT = { - "hotelId": "1000", - "hotelName": "Azure Inn", - } - - result = search_client.upload_documents(documents=[DOCUMENT]) - - print("Upload of new document succeeded: {}".format(result[0].succeeded)) - # [END upload_document] - - -def merge_document(): - # [START merge_document] - result = search_client.merge_documents(documents=[{"hotelId": "783", "hotelName": "Renovated Ranch"}]) - - print("Merge into new document succeeded: {}".format(result[0].succeeded)) - # [END merge_document] - - -def delete_document(): - # [START delete_document] - result = search_client.delete_documents(documents=[{"hotelId": "1000"}]) - - print("Delete new document succeeded: {}".format(result[0].succeeded)) - # [END delete_document] - - -if __name__ == "__main__": - upload_document() - merge_document() - delete_document() diff --git a/sdk/search/azure-search-documents/samples/sample_data_source_operations.py b/sdk/search/azure-search-documents/samples/sample_data_source_operations.py deleted file mode 100644 index cf14b3fa25cf..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_data_source_operations.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding: utf-8 - -# 
------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_data_source_operations.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Data Source. -USAGE: - python sample_data_source_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] -connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes import SearchIndexerClient -from azure.search.documents.indexes.models import SearchIndexerDataContainer, SearchIndexerDataSourceConnection - -client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - - -def create_data_source_connection(): - # [START create_data_source_connection] - container = SearchIndexerDataContainer(name="searchcontainer") - data_source_connection = SearchIndexerDataSourceConnection( - name="sample-data-source-connection", type="azureblob", connection_string=connection_string, container=container - ) - result = client.create_data_source_connection(data_source_connection) - print(result) - print("Create new Data Source Connection - sample-data-source-connection") - # [END create_data_source_connection] - - -def list_data_source_connections(): - # [START list_data_source_connection] - result = client.get_data_source_connections() - names = [ds.name for ds in result] - print("Found {} Data Source Connections in the service: 
{}".format(len(result), ", ".join(names))) - # [END list_data_source_connection] - - -def get_data_source_connection(): - # [START get_data_source_connection] - result = client.get_data_source_connection("sample-data-source-connection") - print("Retrived Data Source Connection 'sample-data-source-connection'") - # [END get_data_source_connection] - - -def delete_data_source_connection(): - # [START delete_data_source_connection] - client.delete_data_source_connection("sample-data-source-connection") - print("Data Source Connection 'sample-data-source-connection' successfully deleted") - # [END delete_data_source_connection] - - -if __name__ == "__main__": - create_data_source_connection() - list_data_source_connections() - get_data_source_connection() - delete_data_source_connection() diff --git a/sdk/search/azure-search-documents/samples/sample_documents_buffered_sender.py b/sdk/search/azure-search-documents/samples/sample_documents_buffered_sender.py new file mode 100644 index 000000000000..f514de4b2837 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_documents_buffered_sender.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to use the SearchIndexingBufferedSender for high-throughput indexing. 
+ +USAGE: + python sample_documents_buffered_sender.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def sample_batching_client(): + # [START sample_batching_client] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchIndexingBufferedSender + + document = { + "HotelId": "100", + "HotelName": "Azure Sanctuary", + "Description": "A quiet retreat offering understated elegance and premium amenities.", + "Description_fr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe.", + "Category": "Luxury", + "Tags": [ + "pool", + "view", + "wifi", + "concierge", + "private beach", + "gourmet dining", + "spa", + ], + "ParkingIncluded": False, + "LastRenovationDate": "2024-01-15T00:00:00+00:00", + "Rating": 5, + "Location": {"type": "Point", "coordinates": [-122.131577, 47.678581]}, + } + + with SearchIndexingBufferedSender( + service_endpoint, index_name, AzureKeyCredential(key) + ) as buffered_sender: + # add upload actions + buffered_sender.upload_documents(documents=[document]) + print(f"Uploaded: document {document['HotelId']}") + + # add merge actions + buffered_sender.merge_documents(documents=[{"HotelId": "100", "Rating": 4.5}]) + print(f"Merged: document {document['HotelId']}") + + # add delete actions + buffered_sender.delete_documents(documents=[{"HotelId": "100"}]) + print(f"Deleted: document {document['HotelId']}") + # [END sample_batching_client] + + +if __name__ == "__main__": + sample_batching_client() diff --git 
a/sdk/search/azure-search-documents/samples/sample_documents_crud.py b/sdk/search/azure-search-documents/samples/sample_documents_crud.py new file mode 100644 index 000000000000..2b7fe2ce17c7 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_documents_crud.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to upload, merge, get, and delete documents. + +USAGE: + python sample_documents_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def upload_document(): + # [START upload_document] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + document = { + "HotelId": "100", + "HotelName": "Azure Sanctuary", + "Description": "A quiet retreat offering understated elegance and premium amenities.", + "Description_fr": "Meilleur hĆ“tel en ville si vous aimez les hĆ“tels de luxe.", + "Category": "Luxury", + "Tags": [ + "pool", + "view", + "wifi", + "concierge", + "private beach", + "gourmet dining", + "spa", + ], + "ParkingIncluded": False, + "LastRenovationDate": "2024-01-15T00:00:00+00:00", + "Rating": 5, + "Location": {"type": "Point", "coordinates": [-122.131577, 47.678581]}, + } + + 
result = search_client.upload_documents(documents=[document]) + + print(f"Uploaded: document 100 (succeeded={result[0].succeeded})") + # [END upload_document] + + +def merge_document(): + # [START merge_document] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + result = search_client.merge_documents( + documents=[{"HotelId": "100", "HotelName": "Azure Sanctuary & Spa"}] + ) + + print(f"Merged: document 100 (succeeded={result[0].succeeded})") + # [END merge_document] + + +def get_document(): + # [START get_document] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + result = search_client.get_document(key="100") + + print("Result:") + print(f" HotelId: 100") + print(f" HotelName: {result['HotelName']}") + # [END get_document] + + +def delete_document(): + # [START delete_document] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + result = search_client.delete_documents(documents=[{"HotelId": "100"}]) + + print(f"Deleted: document 100 (succeeded={result[0].succeeded})") + # [END delete_document] + + +if __name__ == "__main__": + upload_document() + merge_document() + get_document() + delete_document() diff --git a/sdk/search/azure-search-documents/samples/sample_facet_query.py b/sdk/search/azure-search-documents/samples/sample_facet_query.py deleted file mode 100644 index a70078173d11..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_facet_query.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_facet_query.py -DESCRIPTION: - This sample demonstrates how to obtain search facets on specified field in - an Azure Search index. -USAGE: - python sample_facet_query.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -from typing import List, Dict, cast -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def filter_query(): - # [START facet_query] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - results = search_client.search(search_text="WiFi", facets=["category,count:3", "parkingIncluded"]) - - facets: Dict[str, List[str]] = cast(Dict[str, List[str]], results.get_facets()) - - print("Catgory facet counts for hotels:") - for facet in facets["category"]: - print(" {}".format(facet)) - # [END facet_query] - - -if __name__ == "__main__": - filter_query() diff --git a/sdk/search/azure-search-documents/samples/sample_filter_query.py b/sdk/search/azure-search-documents/samples/sample_filter_query.py deleted file mode 100644 index 432edf9a1f02..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_filter_query.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_filter_query.py -DESCRIPTION: - This sample demonstrates how search results from an Azure Search index can - be filtered and ordered. -USAGE: - python sample_filter_query.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def filter_query(): - # [START filter_query] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - results = search_client.search( - search_text="WiFi", - filter="Address/StateProvince eq 'FL' and Address/Country eq 'USA'", - select=["hotelName", "rating"], - order_by=["rating desc"], - ) - - print("Florida hotels containing 'WiFi', sorted by Rating:") - for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) - # [END filter_query] - - -if __name__ == "__main__": - filter_query() diff --git a/sdk/search/azure-search-documents/samples/sample_get_document.py b/sdk/search/azure-search-documents/samples/sample_get_document.py deleted file mode 100644 index 0a5addfa14cb..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_get_document.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_get_document.py -DESCRIPTION: - This sample demonstrates how to retrieve a specific document by key from an - Azure Search index. -USAGE: - python sample_get_document.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def get_document(): - # [START get_document] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - result = search_client.get_document(key="23") - - print("Details for hotel '23' are:") - print(" Name: {}".format(result["hotelName"])) - # [END get_document] - - -if __name__ == "__main__": - get_document() diff --git a/sdk/search/azure-search-documents/samples/sample_index_alias_crud.py b/sdk/search/azure-search-documents/samples/sample_index_alias_crud.py new file mode 100644 index 000000000000..4851509a2523 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_index_alias_crud.py @@ -0,0 +1,137 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete an index alias. 
+ +USAGE: + python sample_index_alias_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") +""" + + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] +alias_name = "hotel-alias" +new_index_name = "hotels-sample-index-v2" + + +def create_alias(): + # [START create_alias] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import SearchAlias + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + alias = SearchAlias(name=alias_name, indexes=[index_name]) + result = index_client.create_alias(alias) + print(f"Created: alias '{result.name}' -> index '{index_name}'") + # [END create_alias] + + +def get_alias(): + # [START get_alias] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + result = index_client.get_alias(alias_name) + print(f"Retrieved: alias '{result.name}'") + # [END get_alias] + + +def update_alias(): + # [START update_alias] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + ScoringProfile, + SearchAlias, + SearchIndex, + SimpleField, + SearchableField, + SearchFieldDataType, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + # Create a new index with a different schema or 
settings + # In a real scenario, this would be your updated index version (e.g., v2) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + SearchableField(name="HotelName", type=SearchFieldDataType.String), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + SimpleField(name="State", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profile = ScoringProfile(name="MyProfile") + index = SearchIndex( + name=new_index_name, + fields=fields, + scoring_profiles=[scoring_profile], + cors_options=cors_options, + ) + + index_client.create_or_update_index(index=index) + print(f"Created: index '{new_index_name}'") + + # Update the alias to point to the new index + # This operation is atomic and ensures zero downtime for applications using the alias + alias = SearchAlias(name=alias_name, indexes=[new_index_name]) + result = index_client.create_or_update_alias(alias) + print(f"Updated: alias '{result.name}' -> index '{new_index_name}'") + # [END update_alias] + + +def delete_alias(): + # [START delete_alias] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.core.exceptions import ResourceNotFoundError + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + index_client.delete_alias(alias_name) + print(f"Deleted: alias '{alias_name}'") + + try: + index_client.delete_index(new_index_name) + print(f"Deleted: index '{new_index_name}'") + except ResourceNotFoundError: + print(f"Skipped: index '{new_index_name}' not found") + # [END delete_alias] + + +if 
__name__ == "__main__": + create_alias() + get_alias() + update_alias() + delete_alias() diff --git a/sdk/search/azure-search-documents/samples/sample_index_alias_crud_operations.py b/sdk/search/azure-search-documents/samples/sample_index_alias_crud_operations.py deleted file mode 100644 index 52220f12b8e4..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_index_alias_crud_operations.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_alias_crud_operations.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete an alias with an existing index. -USAGE: - python sample_index_alias_crud_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key - 3) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") -""" - - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] -alias_name = "motels" - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes import SearchIndexClient -from azure.search.documents.indexes.models import ( - ComplexField, - CorsOptions, - ScoringProfile, - SearchAlias, - SearchIndex, - SimpleField, - SearchableField, - SearchFieldDataType, -) - - -client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - -def create_alias(): - # [START create_alias] - alias = SearchAlias(name=alias_name, indexes=[index_name]) - result = client.create_alias(alias) - # [END create_alias] - - -def get_alias(): - # [START get_alias] - result = client.get_alias(alias_name) - # [END get_alias] - - -def update_alias(): - # [START update_alias] - new_index_name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - SearchableField(name="hotelName", type=SearchFieldDataType.String), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - SimpleField(name="state", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profile = ScoringProfile(name="MyProfile") - scoring_profiles = [] - scoring_profiles.append(scoring_profile) - index = SearchIndex( - name=new_index_name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options - ) - - result_index = client.create_or_update_index(index=index) - - alias = SearchAlias(name=alias_name, indexes=[new_index_name]) - 
result = client.create_or_update_alias(alias) - - # [END update_alias] - - -def delete_alias(): - # [START delete_alias] - - client.delete_alias(alias_name) - # [END delete_alias] - - -if __name__ == "__main__": - create_alias() - get_alias() - update_alias() - delete_alias() diff --git a/sdk/search/azure-search-documents/samples/sample_index_analyze_text.py b/sdk/search/azure-search-documents/samples/sample_index_analyze_text.py new file mode 100644 index 000000000000..ff5fd6d0110c --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_index_analyze_text.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to analyze text using a specific analyzer. + +USAGE: + python sample_index_analyze_text.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def simple_analyze_text(): + # [START simple_analyze_text] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import AnalyzeTextOptions + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + analyze_request = AnalyzeTextOptions( + text="One's ", analyzer_name="standard.lucene" + ) + + analysis_result = index_client.analyze_text(index_name, analyze_request) 
+ + print("Results:") + for token in analysis_result.tokens: + print( + f" Token: {token.token}, Start: {token.start_offset}, End: {token.end_offset}" + ) + # [END simple_analyze_text] + + +if __name__ == "__main__": + simple_analyze_text() diff --git a/sdk/search/azure-search-documents/samples/sample_index_client_custom_request.py b/sdk/search/azure-search-documents/samples/sample_index_client_custom_request.py new file mode 100644 index 000000000000..636e8f0ba2d1 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_index_client_custom_request.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to make custom HTTP requests using SearchIndexClient. + +USAGE: + python sample_index_client_custom_request.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + + +def sample_send_request(): + # [START sample_send_request] + import os + from azure.core.credentials import AzureKeyCredential + from azure.core.rest import HttpRequest + from azure.search.documents.indexes import SearchIndexClient + from sample_utils import AZURE_SEARCH_API_VERSION + + endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] + index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] + key = os.environ["AZURE_SEARCH_API_KEY"] + + index_client = SearchIndexClient(endpoint, AzureKeyCredential(key)) + + # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, + # while adding convenience for endpoint 
construction. + request = HttpRequest( + method="GET", + url=f"/indexes('{index_name}')?api-version={AZURE_SEARCH_API_VERSION}", + ) + response = index_client.send_request(request) + response.raise_for_status() + response_body = response.json() + print(f"Response: {response_body}") + # [END sample_send_request] + + +if __name__ == "__main__": + sample_send_request() diff --git a/sdk/search/azure-search-documents/samples/sample_index_client_send_request.py b/sdk/search/azure-search-documents/samples/sample_index_client_send_request.py deleted file mode 100644 index f8b373b1970b..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_index_client_send_request.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_client_send_request.py - -DESCRIPTION: - This sample demonstrates how to make custom HTTP requests through a client pipeline. - -USAGE: - python sample_index_client_send_request.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -from azure.core.credentials import AzureKeyCredential -from azure.core.rest import HttpRequest -from azure.search.documents.indexes import SearchIndexClient - - -def sample_send_request(): - endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - client = SearchIndexClient(endpoint, AzureKeyCredential(key)) - - # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, - # while adding convenience for endpoint construction. - request = HttpRequest(method="GET", url=f"/indexes('{index_name}')?api-version=2024-05-01-preview") - response = client.send_request(request) - response.raise_for_status() - response_body = response.json() - print(response_body) - - -if __name__ == "__main__": - sample_send_request() diff --git a/sdk/search/azure-search-documents/samples/sample_index_crud.py b/sdk/search/azure-search-documents/samples/sample_index_crud.py new file mode 100644 index 000000000000..6d0b13aad31a --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_index_crud.py @@ -0,0 +1,149 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a search index. 
+ +USAGE: + python sample_index_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + + +import os +from typing import List + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +index_name = "hotels-sample-index-index-crud" + + +def create_index(): + # [START create_index] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + SearchIndex, + ScoringProfile, + SearchFieldDataType, + SimpleField, + SearchableField, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="HotelName", type=SearchFieldDataType.String, searchable=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profiles: List[ScoringProfile] = [] + index = SearchIndex( + name=index_name, + fields=fields, + scoring_profiles=scoring_profiles, + cors_options=cors_options, + ) + + result = index_client.create_index(index) + print(f"Created: index '{result.name}'") + # [END create_index] + + +def get_index(): + # [START get_index] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + 
index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + result = index_client.get_index(index_name) + print(f"Retrieved: index '{result.name}'") + # [END get_index] + + +def update_index(): + # [START update_index] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import ( + ComplexField, + CorsOptions, + SearchIndex, + ScoringProfile, + SearchFieldDataType, + SimpleField, + SearchableField, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="HotelName", type=SearchFieldDataType.String, searchable=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + SearchableField( + name="Description", type=SearchFieldDataType.String, collection=True + ), + ComplexField( + name="Address", + fields=[ + SimpleField(name="StreetAddress", type=SearchFieldDataType.String), + SimpleField(name="City", type=SearchFieldDataType.String), + SimpleField(name="State", type=SearchFieldDataType.String), + ], + collection=True, + ), + ] + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + scoring_profile = ScoringProfile(name="MyProfile") + scoring_profiles = [] + scoring_profiles.append(scoring_profile) + index = SearchIndex( + name=index_name, + fields=fields, + scoring_profiles=scoring_profiles, + cors_options=cors_options, + ) + + result = index_client.create_or_update_index(index=index) + print(f"Updated: index '{result.name}'") + # [END update_index] + + +def delete_index(): + # [START delete_index] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") 
+ # [END delete_index] + + +if __name__ == "__main__": + create_index() + get_index() + update_index() + delete_index() diff --git a/sdk/search/azure-search-documents/samples/sample_index_crud_operations.py b/sdk/search/azure-search-documents/samples/sample_index_crud_operations.py deleted file mode 100644 index 6b0cabb2c2dd..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_index_crud_operations.py +++ /dev/null @@ -1,116 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_index_crud_operations.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete an index. -USAGE: - python sample_index_crud_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - - -import os -from typing import List - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes import SearchIndexClient -from azure.search.documents.indexes.models import ( - ComplexField, - CorsOptions, - SearchIndex, - ScoringProfile, - SearchFieldDataType, - SimpleField, - SearchableField, -) - - -def create_index(): - # [START create_index] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="hotelName", type=SearchFieldDataType.String, searchable=True), - SimpleField(name="baseRate", 
type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profiles: List[ScoringProfile] = [] - index = SearchIndex(name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - - result = client.create_index(index) - # [END create_index] - - -def get_index(): - # [START get_index] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - result = client.get_index(name) - # [END get_index] - - -def update_index(): - # [START update_index] - client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="hotelName", type=SearchFieldDataType.String, searchable=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - SearchableField(name="description", type=SearchFieldDataType.String, collection=True), - ComplexField( - name="address", - fields=[ - SimpleField(name="streetAddress", type=SearchFieldDataType.String), - SimpleField(name="city", type=SearchFieldDataType.String), - SimpleField(name="state", type=SearchFieldDataType.String), - ], - collection=True, - ), - ] - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - scoring_profile = ScoringProfile(name="MyProfile") - scoring_profiles = [] - scoring_profiles.append(scoring_profile) - index = SearchIndex(name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - - result = client.create_or_update_index(index=index) - # [END update_index] - - -def delete_index(): - # [START delete_index] - client = 
SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - name = "hotels" - client.delete_index(name) - # [END delete_index] - - -if __name__ == "__main__": - create_index() - get_index() - update_index() - delete_index() diff --git a/sdk/search/azure-search-documents/samples/sample_index_synonym_map_crud.py b/sdk/search/azure-search-documents/samples/sample_index_synonym_map_crud.py new file mode 100644 index 000000000000..904ec0815c6c --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_index_synonym_map_crud.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a synonym map. +USAGE: + python sample_index_synonym_map_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os +from pathlib import Path + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] + +map1 = "hotels-sample-synonym-map" +map2 = "hotels-sample-synonym-map-file" +file_path = Path(__file__).resolve().parent / "data" / "synonym_map.txt" + + +def create_synonym_map(name): + # [START create_synonym_map] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import SynonymMap + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + synonyms = [ + "USA, United States, United States of America", + "Washington, Wash. 
=> WA", + ] + synonym_map = SynonymMap(name=name, synonyms=synonyms) + result = index_client.create_synonym_map(synonym_map) + print(f"Created: synonym map '{result.name}'") + # [END create_synonym_map] + + +def create_synonym_map_from_file(name): + # [START create_synonym_map_from_file] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import SynonymMap + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + with open(file_path, "r") as f: + solr_format_synonyms = f.read() + synonyms = solr_format_synonyms.split("\n") + synonym_map = SynonymMap(name=name, synonyms=synonyms) + result = index_client.create_synonym_map(synonym_map) + print(f"Created: synonym map '{result.name}'") + # [END create_synonym_map_from_file] + + +def get_synonym_maps(): + # [START get_synonym_maps] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + result = index_client.get_synonym_maps() + names = [x.name for x in result] + print(f"Synonym maps ({len(result)}): {', '.join(names)}") + # [END get_synonym_maps] + + +def get_synonym_map(name): + # [START get_synonym_map] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + result = index_client.get_synonym_map(name) + print(f"Retrieved: synonym map '{name}'") + if result: + for syn in result.synonyms: + print(f" {syn}") + # [END get_synonym_map] + + +def delete_synonym_map(name): + # [START delete_synonym_map] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + 
index_client.delete_synonym_map(name) + print(f"Deleted: synonym map '{name}'") + # [END delete_synonym_map] + + +if __name__ == "__main__": + create_synonym_map(map1) + create_synonym_map_from_file(map2) + get_synonym_maps() + get_synonym_map(map1) + delete_synonym_map(map1) + delete_synonym_map(map2) diff --git a/sdk/search/azure-search-documents/samples/sample_indexer_crud.py b/sdk/search/azure-search-documents/samples/sample_indexer_crud.py new file mode 100644 index 000000000000..33741a9be30e --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_indexer_crud.py @@ -0,0 +1,179 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete an indexer. + +USAGE: + python sample_indexer_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_STORAGE_CONNECTION_STRING - connection string for the Azure Storage account +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] +container_name = "hotels-sample-container" +index_name = "hotels-sample-index-indexer-crud" +data_source_name = "hotels-sample-blob" +indexer_name = "hotels-sample-indexer-indexer-crud" + + +def create_indexer(): + # [START create_indexer] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient, SearchIndexerClient + from azure.search.documents.indexes.models import ( + SearchIndexerDataContainer, + 
SearchIndexerDataSourceConnection, + SearchIndex, + SearchIndexer, + SimpleField, + SearchFieldDataType, + ) + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + # create an index + fields = [ + SimpleField(name="HotelId", type=SearchFieldDataType.String, key=True), + SimpleField(name="BaseRate", type=SearchFieldDataType.Double), + ] + index = SearchIndex(name=index_name, fields=fields) + index_client.create_index(index) + + # create a datasource + container = SearchIndexerDataContainer(name=container_name) + data_source_connection = SearchIndexerDataSourceConnection( + name=data_source_name, + type="azureblob", + connection_string=connection_string, + container=container, + ) + indexer_client.create_data_source_connection(data_source_connection) + + # create an indexer + indexer = SearchIndexer( + name=indexer_name, + data_source_name=data_source_name, + target_index_name=index_name, + ) + result = indexer_client.create_indexer(indexer) + print(f"Created: indexer '{result.name}'") + # [END create_indexer] + + +def list_indexers(): + # [START list_indexers] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + result = indexer_client.get_indexers() + names = [x.name for x in result] + print(f"Indexers ({len(result)}): {', '.join(names)}") + # [END list_indexers] + + +def get_indexer(): + # [START get_indexer] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + result = indexer_client.get_indexer(indexer_name) + print(f"Retrieved: indexer '{result.name}'") + return result + # [END get_indexer] + + +def get_indexer_status(): + # [START 
get_indexer_status] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + status = indexer_client.get_indexer_status(indexer_name) + print(f"Status: indexer '{indexer_name}' is {status.status}") + return status + # [END get_indexer_status] + + +def run_indexer(): + # [START run_indexer] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + indexer_client.run_indexer(indexer_name) + print(f"Ran: indexer '{indexer_name}'") + # [END run_indexer] + + +def reset_indexer(): + # [START reset_indexer] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + result = indexer_client.reset_indexer(indexer_name) + print(f"Reset: indexer '{indexer_name}'") + return result + # [END reset_indexer] + + +def delete_indexer(): + # [START delete_indexer] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + indexer_client.delete_indexer(indexer_name) + print(f"Deleted: indexer '{indexer_name}'") + # [END delete_indexer] + + +def delete_data_source(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + indexer_client.delete_data_source_connection(data_source_name) + print(f"Deleted: data source '{data_source_name}'") + + +def delete_index(): + from azure.core.credentials import AzureKeyCredential + from 
azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + + +if __name__ == "__main__": + create_indexer() + list_indexers() + get_indexer() + get_indexer_status() + run_indexer() + reset_indexer() + delete_indexer() + delete_data_source() + delete_index() diff --git a/sdk/search/azure-search-documents/samples/sample_indexer_datasource_crud.py b/sdk/search/azure-search-documents/samples/sample_indexer_datasource_crud.py new file mode 100644 index 000000000000..b110043daee9 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_indexer_datasource_crud.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create, get, update, and delete a data source. 
+ +USAGE: + python sample_indexer_datasource_crud.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_STORAGE_CONNECTION_STRING - connection string for the Azure Storage account +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] + +data_source_connection_name = "hotels-sample-blob" +container_name = "hotels-sample-container" + + +def create_data_source_connection(): + # [START create_data_source_connection] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + from azure.search.documents.indexes.models import ( + SearchIndexerDataContainer, + SearchIndexerDataSourceConnection, + ) + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + container = SearchIndexerDataContainer(name=container_name) + data_source_connection = SearchIndexerDataSourceConnection( + name=data_source_connection_name, + type="azureblob", + connection_string=connection_string, + container=container, + ) + result = indexer_client.create_data_source_connection(data_source_connection) + print(f"Created: data source '{result.name}'") + # [END create_data_source_connection] + + +def list_data_source_connections(): + # [START list_data_source_connections] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + result = indexer_client.get_data_source_connections() + names = [ds.name for ds in result] + print(f"Data sources ({len(result)}): {', '.join(names)}") + # [END list_data_source_connections] + 
+ +def get_data_source_connection(): + # [START get_data_source_connection] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + result = indexer_client.get_data_source_connection(data_source_connection_name) + print(f"Retrieved: data source '{result.name}'") + # [END get_data_source_connection] + + +def delete_data_source_connection(): + # [START delete_data_source_connection] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + + indexer_client.delete_data_source_connection(data_source_connection_name) + print(f"Deleted: data source '{data_source_connection_name}'") + # [END delete_data_source_connection] + + +if __name__ == "__main__": + create_data_source_connection() + list_data_source_connections() + get_data_source_connection() + delete_data_source_connection() diff --git a/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py b/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py deleted file mode 100644 index b0654e56210f..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -FILE: sample_indexer_datasource_skillset.py -DESCRIPTION: - This sample demonstrates use an indexer, datasource and skillset together. - - Indexer is used to efficiently write data to an index using a datasource. - So we first identify a supported data source - we use azure storage blobs - in this example. Then we create an index which is compatible with the datasource. - Further, we create an azure cognitive search datasource which we require to finally - create an indexer. 
- - Additionally, we will also use skillsets to provide some AI enhancements in our indexers. - - Once we create the indexer, we run the indexer and perform some basic operations like getting - the indexer status. - - The datasource used in this sample is stored as metadata for empty blobs in "searchcontainer". - The json file can be found in samples/files folder named hotel_small.json has the metdata of - each blob. -USAGE: - python sample_indexer_datasource_skillset.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key - 3) AZURE_STORAGE_CONNECTION_STRING - The connection string for the storage blob account that is - being used to create the datasource. -""" - -import os -import datetime - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] -connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.models import ( - SearchIndexerDataContainer, - SearchIndex, - SearchIndexer, - SimpleField, - SearchFieldDataType, - EntityRecognitionSkill, - InputFieldMappingEntry, - OutputFieldMappingEntry, - SearchIndexerSkillset, - CorsOptions, - IndexingSchedule, - SearchableField, - IndexingParameters, - SearchIndexerDataSourceConnection, - IndexingParametersConfiguration, -) -from azure.search.documents.indexes import SearchIndexerClient, SearchIndexClient - - -def _create_index(): - name = "hotel-index" - - # Here we create an index with listed fields. 
- fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, filterable=True, sortable=True, key=True), - SearchableField(name="hotelName", type=SearchFieldDataType.String), - SimpleField(name="description", type=SearchFieldDataType.String), - SimpleField(name="descriptionFr", type=SearchFieldDataType.String), - SimpleField(name="category", type=SearchFieldDataType.String), - SimpleField(name="parkingIncluded", type=SearchFieldDataType.Boolean, filterable=True), - SimpleField(name="smokingAllowed", type=SearchFieldDataType.Boolean, filterable=True), - SimpleField(name="lastRenovationDate", type=SearchFieldDataType.String), - SimpleField(name="rating", type=SearchFieldDataType.Int64, sortable=True), - SimpleField(name="location", type=SearchFieldDataType.GeographyPoint), - ] - cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) - - # pass in the name, fields and cors options and create the index - index = SearchIndex(name=name, fields=fields, cors_options=cors_options) - index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - result = index_client.create_index(index) - return result - - -def _create_datasource(): - # Here we create a datasource. 
As mentioned in the description we have stored it in - # "searchcontainer" - ds_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - container = SearchIndexerDataContainer(name="searchcontainer") - data_source_connection = SearchIndexerDataSourceConnection( - name="hotel-datasource", type="azureblob", connection_string=connection_string, container=container - ) - data_source = ds_client.create_data_source_connection(data_source_connection) - return data_source - - -def _create_skillset(): - client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - inp = InputFieldMappingEntry(name="text", source="/document/lastRenovationDate") - output = OutputFieldMappingEntry(name="dateTimes", target_name="RenovatedDate") - s = EntityRecognitionSkill(name="merge-skill", inputs=[inp], outputs=[output]) - - skillset = SearchIndexerSkillset(name="hotel-data-skill", skills=[s], description="example skillset") - result = client.create_skillset(skillset) - return result - - -def sample_indexer_workflow(): - # Now that we have a datasource and an index, we can create an indexer. 
- - skillset_name = _create_skillset().name - print("Skillset is created") - - ds_name = _create_datasource().name - print("Data source is created") - - ind_name = _create_index().name - print("Index is created") - - # we pass the data source, skillsets and targeted index to build an indexer - configuration = IndexingParametersConfiguration(parsing_mode="jsonArray", query_timeout=None) # type: ignore - parameters = IndexingParameters(configuration=configuration) - indexer = SearchIndexer( - name="hotel-data-indexer", - data_source_name=ds_name, - target_index_name=ind_name, - skillset_name=skillset_name, - parameters=parameters, - ) - - indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - indexer_client.create_indexer(indexer) # create the indexer - - # to get an indexer - result = indexer_client.get_indexer("hotel-data-indexer") - print(result) - - # To run an indexer, we can use run_indexer() - indexer_client.run_indexer(result.name) - - # Using create or update to schedule an indexer - - schedule = IndexingSchedule(interval=datetime.timedelta(hours=24)) - result.schedule = schedule - updated_indexer = indexer_client.create_or_update_indexer(result) - - print(updated_indexer) - - # get the status of an indexer - indexer_client.get_indexer_status(updated_indexer.name) - - -if __name__ == "__main__": - sample_indexer_workflow() diff --git a/sdk/search/azure-search-documents/samples/sample_indexer_workflow.py b/sdk/search/azure-search-documents/samples/sample_indexer_workflow.py new file mode 100644 index 000000000000..12e3bb668c9a --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_indexer_workflow.py @@ -0,0 +1,168 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to run an indexer with a data source and skillset. + +USAGE: + python sample_indexer_workflow.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_API_KEY - the admin key for your search service + 3) AZURE_STORAGE_CONNECTION_STRING - connection string for the Azure Storage account +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] +connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] + +index_name = "hotels-sample-index-indexer-workflow" +data_source_name = "hotels-sample-blob" +skillset_name = "hotels-sample-skillset" +indexer_name = "hotels-sample-indexer-indexer-workflow" +container_name = "hotels-sample-container" + + +def sample_indexer_workflow(): + # [START sample_indexer_workflow] + import datetime + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient, SearchIndexClient + from azure.search.documents.indexes.models import ( + SearchIndexerDataContainer, + SearchIndex, + SearchIndexer, + SimpleField, + SearchFieldDataType, + EntityRecognitionSkill, + InputFieldMappingEntry, + OutputFieldMappingEntry, + SearchIndexerSkillset, + CorsOptions, + IndexingSchedule, + SearchableField, + IndexingParameters, + SearchIndexerDataSourceConnection, + IndexingParametersConfiguration, + ) + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + # 1. 
Create an index + fields = [ + SimpleField( + name="HotelId", + type=SearchFieldDataType.String, + filterable=True, + sortable=True, + key=True, + ), + SearchableField(name="HotelName", type=SearchFieldDataType.String), + SimpleField(name="Description", type=SearchFieldDataType.String), + SimpleField(name="Description_fr", type=SearchFieldDataType.String), + SimpleField(name="Category", type=SearchFieldDataType.String), + SimpleField( + name="ParkingIncluded", type=SearchFieldDataType.Boolean, filterable=True + ), + SimpleField( + name="SmokingAllowed", type=SearchFieldDataType.Boolean, filterable=True + ), + SimpleField(name="LastRenovationDate", type=SearchFieldDataType.String), + SimpleField(name="Rating", type=SearchFieldDataType.Double, sortable=True), + SimpleField(name="Location", type=SearchFieldDataType.GeographyPoint), + ] + cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) + index = SearchIndex(name=index_name, fields=fields, cors_options=cors_options) + index_client.create_index(index) + print(f"Created: index '{index_name}'") + + # 2. Create a data source + container = SearchIndexerDataContainer(name=container_name) + data_source_connection = SearchIndexerDataSourceConnection( + name=data_source_name, + type="azureblob", + connection_string=connection_string, + container=container, + ) + indexer_client.create_data_source_connection(data_source_connection) + print(f"Created: data source '{data_source_name}'") + + # 3. Create a skillset + inp = InputFieldMappingEntry(name="text", source="/document/lastRenovationDate") + output = OutputFieldMappingEntry(name="dateTimes", target_name="RenovatedDate") + skill = EntityRecognitionSkill(name="merge-skill", inputs=[inp], outputs=[output]) + skillset = SearchIndexerSkillset( + name=skillset_name, skills=[skill], description="example skillset" + ) + indexer_client.create_skillset(skillset) + print(f"Created: skillset '{skillset_name}'") + + # 4. 
Create an indexer + configuration = IndexingParametersConfiguration(parsing_mode="jsonArray") + parameters = IndexingParameters(configuration=configuration) + indexer = SearchIndexer( + name=indexer_name, + data_source_name=data_source_name, + target_index_name=index_name, + skillset_name=skillset_name, + parameters=parameters, + ) + indexer_client.create_indexer(indexer) + print(f"Created: indexer '{indexer_name}'") + + # Get the indexer + result = indexer_client.get_indexer(indexer_name) + print(f"Retrieved: indexer '{result.name}'") + + # Run the indexer + indexer_client.run_indexer(result.name) + print("Started: indexer run") + + # Schedule the indexer + schedule = IndexingSchedule(interval=datetime.timedelta(hours=24)) + result.schedule = schedule + updated_indexer = indexer_client.create_or_update_indexer(result) + if updated_indexer.schedule is not None: + print(f"Scheduled: indexer every {updated_indexer.schedule.interval}") + + # Get indexer status + status = indexer_client.get_indexer_status(updated_indexer.name) + print(f"Status: indexer '{updated_indexer.name}' is {status.status}") + # [END sample_indexer_workflow] + + +def delete_indexer_workflow_resources(): + # [START delete_indexer_workflow_resources] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexerClient, SearchIndexClient + + indexer_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + indexer_client.delete_indexer(indexer_name) + print(f"Deleted: indexer '{indexer_name}'") + + indexer_client.delete_skillset(skillset_name) + print(f"Deleted: skillset '{skillset_name}'") + + indexer_client.delete_data_source_connection(data_source_name) + print(f"Deleted: data source '{data_source_name}'") + + index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + # [END delete_indexer_workflow_resources] + + +if __name__ == 
"__main__": + sample_indexer_workflow() + delete_indexer_workflow_resources() diff --git a/sdk/search/azure-search-documents/samples/sample_indexers_operations.py b/sdk/search/azure-search-documents/samples/sample_indexers_operations.py deleted file mode 100644 index ff854d51be96..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_indexers_operations.py +++ /dev/null @@ -1,123 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_indexer_operations.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Indexer. -USAGE: - python sample_indexer_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] -connection_string = os.environ["AZURE_STORAGE_CONNECTION_STRING"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes.models import ( - SearchIndexerDataContainer, - SearchIndexerDataSourceConnection, - SearchIndex, - SearchIndexer, - SimpleField, - SearchFieldDataType, -) -from azure.search.documents.indexes import SearchIndexClient, SearchIndexerClient - -indexers_client = SearchIndexerClient(service_endpoint, AzureKeyCredential(key)) - - -def create_indexer(): - # create an index - index_name = "indexer-hotels" - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SimpleField(name="baseRate", type=SearchFieldDataType.Double), - ] - index = 
SearchIndex(name=index_name, fields=fields) - ind_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - ind_client.create_index(index) - - # [START create_indexer] - # create a datasource - container = SearchIndexerDataContainer(name="searchcontainer") - data_source_connection = SearchIndexerDataSourceConnection( - name="indexer-datasource", type="azureblob", connection_string=connection_string, container=container - ) - data_source = indexers_client.create_data_source_connection(data_source_connection) - - # create an indexer - indexer = SearchIndexer( - name="sample-indexer", data_source_name="indexer-datasource", target_index_name="indexer-hotels" - ) - result = indexers_client.create_indexer(indexer) - print("Create new Indexer - sample-indexer") - # [END create_indexer] - - -def list_indexers(): - # [START list_indexer] - result = indexers_client.get_indexers() - names = [x.name for x in result] - print("Found {} Indexers in the service: {}".format(len(result), ", ".join(names))) - # [END list_indexer] - - -def get_indexer(): - # [START get_indexer] - result = indexers_client.get_indexer("sample-indexer") - print("Retrived Indexer 'sample-indexer'") - return result - # [END get_indexer] - - -def get_indexer_status(): - # [START get_indexer_status] - result = indexers_client.get_indexer_status("sample-indexer") - print("Retrived Indexer status for 'sample-indexer'") - return result - # [END get_indexer_status] - - -def run_indexer(): - # [START run_indexer] - result = indexers_client.run_indexer("sample-indexer") - print("Ran the Indexer 'sample-indexer'") - return result - # [END run_indexer] - - -def reset_indexer(): - # [START reset_indexer] - result = indexers_client.reset_indexer("sample-indexer") - print("Reset the Indexer 'sample-indexer'") - return result - # [END reset_indexer] - - -def delete_indexer(): - # [START delete_indexer] - indexers_client.delete_indexer("sample-indexer") - print("Indexer 'sample-indexer' successfully 
deleted") - # [END delete_indexer] - - -if __name__ == "__main__": - create_indexer() - list_indexers() - get_indexer() - get_indexer_status() - run_indexer() - reset_indexer() - delete_indexer() diff --git a/sdk/search/azure-search-documents/samples/sample_autocomplete.py b/sdk/search/azure-search-documents/samples/sample_query_autocomplete.py similarity index 52% rename from sdk/search/azure-search-documents/samples/sample_autocomplete.py rename to sdk/search/azure-search-documents/samples/sample_query_autocomplete.py index 7846637b5dbc..4df5327c3371 100644 --- a/sdk/search/azure-search-documents/samples/sample_autocomplete.py +++ b/sdk/search/azure-search-documents/samples/sample_query_autocomplete.py @@ -1,23 +1,22 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_autocomplete.py DESCRIPTION: - This sample demonstrates how to obtain autocompletion suggestions from an - Azure Search index. + Demonstrates how to retrieve autocomplete suggestions. + USAGE: - python sample_autocomplete.py + python sample_query_autocomplete.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service """ import os @@ -36,9 +35,9 @@ def autocomplete_query(): results = search_client.autocomplete(search_text="bo", suggester_name="sg") - print("Autocomplete suggestions for 'bo'") + print("Results: autocomplete for 'bo'") for result in results: - print(" Completion: {}".format(result["text"])) + print(f" Completion: {result['text']}") # [END autocomplete_query] diff --git a/sdk/search/azure-search-documents/samples/sample_query_facets.py b/sdk/search/azure-search-documents/samples/sample_query_facets.py new file mode 100644 index 000000000000..4ff9245422c0 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_facets.py @@ -0,0 +1,50 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to retrieve facets in search results. 
+ +USAGE: + python sample_query_facets.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def facet_query(): + # [START facet_query] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + results = search_client.search( + search_text="WiFi", facets=["Category,count:3", "ParkingIncluded"] + ) + + facets = results.get_facets() + + print("Results: category facets") + if facets: + for facet in facets["Category"]: + print(f" {facet}") + # [END facet_query] + + +if __name__ == "__main__": + facet_query() diff --git a/sdk/search/azure-search-documents/samples/sample_query_filter.py b/sdk/search/azure-search-documents/samples/sample_query_filter.py new file mode 100644 index 000000000000..dbc1faa0c961 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_filter.py @@ -0,0 +1,50 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to filter and sort search results. 
+ +USAGE: + python sample_query_filter.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + (e.g., https://.search.windows.net) + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def filter_query(): + # [START filter_query] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + results = search_client.search( + search_text="WiFi", + filter="Address/StateProvince eq 'FL' and Address/Country eq 'USA'", + select=["HotelName", "Rating"], + order_by=["Rating desc"], + ) + + print("Results: Florida hotels with WiFi (sorted by rating)") + for result in results: + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") + # [END filter_query] + + +if __name__ == "__main__": + filter_query() diff --git a/sdk/search/azure-search-documents/samples/sample_query_semantic.py b/sdk/search/azure-search-documents/samples/sample_query_semantic.py new file mode 100644 index 000000000000..bffdf3f8f3fc --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_semantic.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to perform semantic search. 
+USAGE: + python sample_query_semantic.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] +semantic_configuration_name = "hotels-sample-semantic-config" + + +def create_semantic_configuration(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + from azure.search.documents.indexes.models import ( + SemanticConfiguration, + SemanticPrioritizedFields, + SemanticField, + SemanticSearch, + ) + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + index = index_client.get_index(index_name) + + semantic_config = SemanticConfiguration( + name=semantic_configuration_name, + prioritized_fields=SemanticPrioritizedFields( + title_field=SemanticField(field_name="HotelName"), + content_fields=[SemanticField(field_name="Description")], + keywords_fields=[SemanticField(field_name="Tags")], + ), + ) + + index.semantic_search = SemanticSearch(configurations=[semantic_config]) + index_client.create_or_update_index(index) + print( + f"Updated: index '{index_name}' (semantic config '{semantic_configuration_name}')" + ) + + +def speller(): + # [START speller] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + credential = AzureKeyCredential(key) + search_client = SearchClient( + endpoint=service_endpoint, index_name=index_name, credential=credential + ) + results = list( + search_client.search( + search_text="luxury", query_language="en-us", query_speller="lexicon" + ) + ) + + print("Results: speller") 
+ for result in results: + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + # [END speller] + + +def semantic_ranking(): + # [START semantic_ranking] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + credential = AzureKeyCredential(key) + search_client = SearchClient( + endpoint=service_endpoint, index_name=index_name, credential=credential + ) + results = list( + search_client.search( + search_text="luxury", + query_type="semantic", + semantic_configuration_name=semantic_configuration_name, + query_language="en-us", + ) + ) + + print("Results: semantic ranking") + for result in results: + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + # [END semantic_ranking] + + +def delete_semantic_configuration(): + from azure.core.credentials import AzureKeyCredential + from azure.search.documents.indexes import SearchIndexClient + + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + index = index_client.get_index(index_name) + + index.semantic_search = None + index_client.create_or_update_index(index) + print(f"Deleted: semantic config from index '{index_name}'") + + +if __name__ == "__main__": + create_semantic_configuration() + speller() + semantic_ranking() + delete_semantic_configuration() diff --git a/sdk/search/azure-search-documents/samples/sample_query_session.py b/sdk/search/azure-search-documents/samples/sample_query_session.py index 64e783ff636a..4820f7bbc086 100644 --- a/sdk/search/azure-search-documents/samples/sample_query_session.py +++ b/sdk/search/azure-search-documents/samples/sample_query_session.py @@ -1,24 +1,20 @@ # coding: utf-8 # ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
+# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. # -------------------------------------------------------------------------- """ -FILE: sample_query_session.py DESCRIPTION: - To ensure more consistent and unique search results within a user's session, you can use session id. - Simply include the session_id parameter in your queries to create a unique identifier for each user session. - This ensures a uniform experience for users throughout their "query session". + Demonstrates how to use session IDs for consistent scoring. USAGE: python sample_query_session.py - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service """ import os @@ -37,9 +33,9 @@ def query_session(): results = search_client.search(search_text="spa", session_id="session-1") - print("Hotels containing 'spa' in the name (or other fields):") + print("Results: hotels with 'spa'") for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") # [END query_session] diff --git a/sdk/search/azure-search-documents/samples/sample_query_simple.py b/sdk/search/azure-search-documents/samples/sample_query_simple.py new file mode 100644 index 000000000000..c7211692103d --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_simple.py @@ -0,0 +1,43 @@ +# coding: utf-8 + +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to perform a simple text search. +USAGE: + python sample_query_simple.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def simple_query(): + # [START simple_query] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + results = search_client.search(search_text="spa") + + print("Results: hotels with 'spa'") + for result in results: + print(f" HotelName: {result['HotelName']} (rating {result['Rating']})") + # [END simple_query] + + +if __name__ == "__main__": + simple_query() diff --git a/sdk/search/azure-search-documents/samples/sample_query_suggestions.py b/sdk/search/azure-search-documents/samples/sample_query_suggestions.py new file mode 100644 index 000000000000..3bf3d9444022 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_suggestions.py @@ -0,0 +1,44 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to retrieve search suggestions. 
+USAGE: + python sample_query_suggestions.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def suggest_query(): + # [START suggest_query] + from azure.core.credentials import AzureKeyCredential + from azure.search.documents import SearchClient + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + results = search_client.suggest(search_text="coffee", suggester_name="sg") + + print("Results: suggestions for 'coffee'") + for result in results: + hotel = search_client.get_document(key=result["HotelId"]) + print(f" Text: {result['text']!r}, HotelName: {hotel['HotelName']}") + # [END suggest_query] + + +if __name__ == "__main__": + suggest_query() diff --git a/sdk/search/azure-search-documents/samples/sample_query_vector.py b/sdk/search/azure-search-documents/samples/sample_query_vector.py new file mode 100644 index 000000000000..043b5484d331 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_query_vector.py @@ -0,0 +1,259 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to create a vector-enabled index, upload documents with + pre-computed DescriptionVector values, and run vector queries. 
+ +USAGE: + python sample_query_vector.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_API_KEY - the primary admin key for your search service + +NOTE: + This sample uses a pre-computed vector for the query "quintessential lodging + near running trails, eateries, retail" instead of calling an embedding API. + The vector was generated using text-embedding-ada-002 (1536 dimensions). +""" + +import json +import os +from pathlib import Path +from azure.core.credentials import AzureKeyCredential +from azure.search.documents import SearchClient +from azure.search.documents.models import VectorizedQuery +from azure.search.documents.indexes import SearchIndexClient +from azure.search.documents.indexes.models import ( + SimpleField, + SearchField, + SearchFieldDataType, + SearchableField, + SearchIndex, + SemanticConfiguration, + SemanticField, + SemanticPrioritizedFields, + SemanticSearch, + VectorSearch, + VectorSearchProfile, + HnswAlgorithmConfiguration, + ExhaustiveKnnAlgorithmConfiguration, +) + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +key = os.environ["AZURE_SEARCH_API_KEY"] + +index_name = "hotels-sample-index-query-vector" +data_dir = Path(__file__).resolve().parent / "data" +documents_path = data_dir / "hotels_with_description_vector.json" +query_vector_path = data_dir / "query_vector.json" + + +def load_query_vector(query_vector_path): + """Load the query vector from the samples/data folder.""" + with open(query_vector_path, "r", encoding="utf-8") as handle: + return json.load(handle) + + +vector = load_query_vector(query_vector_path) + + +def create_index(): + """Create or update the vector-enabled search index.""" + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + + fields = [ + SimpleField( + name="HotelId", type=SearchFieldDataType.String, key=True, filterable=True + ), + 
SearchableField( + name="HotelName", type=SearchFieldDataType.String, sortable=True + ), + SearchableField(name="Description", type=SearchFieldDataType.String), + SearchField( + name="DescriptionVector", + type=SearchFieldDataType.Collection(SearchFieldDataType.Single), + searchable=True, + vector_search_dimensions=1536, + vector_search_profile_name="my-vector-profile", + ), + SearchableField( + name="Category", + type=SearchFieldDataType.String, + sortable=True, + filterable=True, + facetable=True, + ), + SearchField( + name="Tags", + type=SearchFieldDataType.Collection(SearchFieldDataType.String), + searchable=True, + filterable=True, + facetable=True, + ), + ] + + vector_search = VectorSearch( + algorithms=[ + HnswAlgorithmConfiguration(name="my-hnsw-vector-config-1", kind="hnsw"), + ExhaustiveKnnAlgorithmConfiguration( + name="my-eknn-vector-config", kind="exhaustiveKnn" + ), + ], + profiles=[ + VectorSearchProfile( + name="my-vector-profile", + algorithm_configuration_name="my-hnsw-vector-config-1", + ) + ], + ) + + semantic_config = SemanticConfiguration( + name="my-semantic-config", + prioritized_fields=SemanticPrioritizedFields( + title_field=SemanticField(field_name="HotelName"), + content_fields=[SemanticField(field_name="Description")], + keywords_fields=[SemanticField(field_name="Category")], + ), + ) + + semantic_search = SemanticSearch(configurations=[semantic_config]) + + index = SearchIndex( + name=index_name, + fields=fields, + vector_search=vector_search, + semantic_search=semantic_search, + ) + + result = index_client.create_or_update_index(index) + print(f"Created: index '{result.name}'") + + +def load_documents(): + with open(documents_path, "r", encoding="utf-8") as handle: + raw = handle.read().strip() + + payload = json.loads(raw) + documents = payload["value"] + + return documents + + +def upload_documents(): + """Upload documents to the search index.""" + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + 
documents = load_documents() + result = search_client.upload_documents(documents=documents) + print(f"Uploaded: {len(result)} documents to index '{index_name}'") + + +def single_vector_search(): + """Perform a single vector search using a pre-computed query vector.""" + # [START single_vector_search] + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + results = search_client.search( + vector_queries=[vector_query], + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: single vector search") + for result in results: + print( + f" HotelId: {result['HotelId']}, HotelName: {result['HotelName']}, " + f"Category: {result.get('Category')}" + ) + # [END single_vector_search] + + +def single_vector_search_with_filter(): + """Perform a vector search with a filter applied.""" + # [START single_vector_search_with_filter] + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + results = search_client.search( + vector_queries=[vector_query], + filter="Tags/any(tag: tag eq 'free wifi')", + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: vector search with filter") + for result in results: + print( + f" HotelId: {result['HotelId']}, HotelName: {result['HotelName']}, " + f"Tags: {result.get('Tags')}" + ) + # [END single_vector_search_with_filter] + + +def simple_hybrid_search(): + """Perform a hybrid search combining vector and text search.""" + # [START simple_hybrid_search] + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + vector_query = VectorizedQuery( + vector=vector, + k=5, + fields="DescriptionVector", + ) + + results = search_client.search( + search_text="historic hotel walk to 
restaurants and shopping", + vector_queries=[vector_query], + select=["HotelId", "HotelName", "Description", "Category", "Tags"], + top=5, + ) + + print("Results: hybrid search") + for result in results: + score = result.get("@search.score", "N/A") + print(f" Score: {score}") + print(f" HotelId: {result['HotelId']}") + print(f" HotelName: {result['HotelName']}") + print(f" Description: {result.get('Description')}") + print(f" Category: {result.get('Category')}") + print(f" Tags: {result.get('Tags', 'N/A')}") + print() + # [END simple_hybrid_search] + + +def delete_index(): + index_client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) + index_client.delete_index(index_name) + print(f"Deleted: index '{index_name}'") + + +if __name__ == "__main__": + print("Query: 'quintessential lodging near running trails, eateries, retail'") + try: + create_index() + upload_documents() + single_vector_search() + print() + single_vector_search_with_filter() + print() + simple_hybrid_search() + print() + finally: + delete_index() diff --git a/sdk/search/azure-search-documents/samples/sample_search_client_custom_request.py b/sdk/search/azure-search-documents/samples/sample_search_client_custom_request.py new file mode 100644 index 000000000000..43040de55cbf --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_search_client_custom_request.py @@ -0,0 +1,48 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +""" +DESCRIPTION: + Demonstrates how to make custom HTTP requests using SearchClient. 
+ +USAGE: + python sample_search_client_custom_request.py + + Set the following environment variables before running the sample: + 1) AZURE_SEARCH_SERVICE_ENDPOINT - base URL of your Azure AI Search service + 2) AZURE_SEARCH_INDEX_NAME - target search index name (e.g., "hotels-sample-index") + 3) AZURE_SEARCH_API_KEY - the primary admin key for your search service +""" + +import os + +service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] +index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] +key = os.environ["AZURE_SEARCH_API_KEY"] + + +def sample_send_request(): + from azure.core.credentials import AzureKeyCredential + from azure.core.rest import HttpRequest + from azure.search.documents import SearchClient + from sample_utils import AZURE_SEARCH_API_VERSION + + search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) + + # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, + # while adding convenience for endpoint construction. + request = HttpRequest( + method="GET", url=f"/docs/$count?api-version={AZURE_SEARCH_API_VERSION}" + ) + response = search_client.send_request(request) + response.raise_for_status() + response_body = response.json() + print(f"Document count: {response_body} (index '{index_name}')") + + +if __name__ == "__main__": + sample_send_request() diff --git a/sdk/search/azure-search-documents/samples/sample_search_client_send_request.py b/sdk/search/azure-search-documents/samples/sample_search_client_send_request.py deleted file mode 100644 index 616d04fe0e52..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_search_client_send_request.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -""" -FILE: sample_search_client_send_request.py - -DESCRIPTION: - This sample demonstrates how to make custom HTTP requests through a client pipeline. - -USAGE: - python sample_search_client_send_request.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os -from azure.core.credentials import AzureKeyCredential -from azure.core.rest import HttpRequest -from azure.search.documents import SearchClient - - -def sample_send_request(): - endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - client = SearchClient(endpoint, index_name, AzureKeyCredential(key)) - - # The `send_request` method can send custom HTTP requests that share the client's existing pipeline, - # while adding convenience for endpoint construction. - request = HttpRequest(method="GET", url=f"/docs/$count?api-version=2024-05-01-preview") - response = client.send_request(request) - response.raise_for_status() - response_body = response.json() - print(response_body) - - -if __name__ == "__main__": - sample_send_request() diff --git a/sdk/search/azure-search-documents/samples/sample_semantic_search.py b/sdk/search/azure-search-documents/samples/sample_semantic_search.py deleted file mode 100644 index c6e5be9c57ad..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_semantic_search.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_semantic_search.py -DESCRIPTION: - This sample demonstrates how to use semantic search. -USAGE: - python sample_semantic_search.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - - -def speller(): - # [START speller] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - credential = AzureKeyCredential(key) - client = SearchClient(endpoint=service_endpoint, index_name=index_name, credential=credential) - results = list(client.search(search_text="luxury", query_language="en-us", query_speller="lexicon")) - - for result in results: - print("{}\n{}\n)".format(result["hotelId"], result["hotelName"])) - # [END speller] - - -def semantic_ranking(): - # [START semantic_ranking] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] - index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] - key = os.environ["AZURE_SEARCH_API_KEY"] - - credential = AzureKeyCredential(key) - client = SearchClient(endpoint=service_endpoint, index_name=index_name, credential=credential) - results = list( - client.search( - search_text="luxury", - query_type="semantic", - semantic_configuration_name="semantic_config_name", - query_language="en-us", - ) - ) - - for result in results: - print("{}\n{}\n)".format(result["hotelId"], 
result["hotelName"])) - # [END semantic_ranking] - - -if __name__ == "__main__": - speller() - semantic_ranking() diff --git a/sdk/search/azure-search-documents/samples/sample_simple_query.py b/sdk/search/azure-search-documents/samples/sample_simple_query.py deleted file mode 100644 index 9f2317a008e3..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_simple_query.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_simple_query.py -DESCRIPTION: - This sample demonstrates how to get search results from a basic search text - from an Azure Search index. -USAGE: - python sample_simple_query.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def simple_text_query(): - # [START simple_query] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - results = search_client.search(search_text="spa") - - print("Hotels containing 'spa' in the name (or other fields):") - for result in results: - print(" Name: {} (rating {})".format(result["hotelName"], result["rating"])) - # [END simple_query] - - -if __name__ == "__main__": - simple_text_query() diff --git a/sdk/search/azure-search-documents/samples/sample_suggestions.py b/sdk/search/azure-search-documents/samples/sample_suggestions.py deleted file mode 100644 index e24478418b25..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_suggestions.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_suggestions.py -DESCRIPTION: - This sample demonstrates how to obtain search suggestions from an Azure - search index -USAGE: - python sample_suggestions.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. 
"hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def suggest_query(): - # [START suggest_query] - from azure.core.credentials import AzureKeyCredential - from azure.search.documents import SearchClient - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - - results = search_client.suggest(search_text="coffee", suggester_name="sg") - - print("Search suggestions for 'coffee'") - for result in results: - hotel = search_client.get_document(key=result["hotelId"]) - print(" Text: {} for Hotel: {}".format(repr(result["text"]), hotel["hotelName"])) - # [END suggest_query] - - -if __name__ == "__main__": - suggest_query() diff --git a/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py b/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py deleted file mode 100644 index c6f8b9575f40..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py +++ /dev/null @@ -1,89 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_synonym_map_operations.py -DESCRIPTION: - This sample demonstrates how to get, create, update, or delete a Synonym Map. 
-USAGE: - python sample_synonym_map_operations.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -key = os.environ["AZURE_SEARCH_API_KEY"] - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents.indexes import SearchIndexClient -from azure.search.documents.indexes.models import SynonymMap - -client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) - - -def create_synonym_map(): - # [START create_synonym_map] - synonyms = [ - "USA, United States, United States of America", - "Washington, Wash. => WA", - ] - synonym_map = SynonymMap(name="test-syn-map", synonyms=synonyms) - result = client.create_synonym_map(synonym_map) - print("Create new Synonym Map 'test-syn-map succeeded") - # [END create_synonym_map] - - -def create_synonym_map_from_file(): - # [START create_synonym_map_from_file] - from os.path import dirname, join, realpath - - CWD = dirname(realpath(__file__)) - file_path = join(CWD, "synonym_map.txt") - with open(file_path, "r") as f: - solr_format_synonyms = f.read() - synonyms = solr_format_synonyms.split("\n") - synonym_map = SynonymMap(name="test-syn-map", synonyms=synonyms) - result = client.create_synonym_map(synonym_map) - print("Create new Synonym Map 'test-syn-map succeeded") - # [END create_synonym_map_from_file] - - -def get_synonym_maps(): - # [START get_synonym_maps] - result = client.get_synonym_maps() - names = [x.name for x in result] - print("Found {} Synonym Maps in the service: {}".format(len(result), ", ".join(names))) - # [END get_synonym_maps] - - -def get_synonym_map(): - # [START get_synonym_map] - result = client.get_synonym_map("test-syn-map") - print("Retrived Synonym Map 'test-syn-map' with synonyms") - if result: - for syn in 
result.synonyms: - print(" {}".format(syn)) - # [END get_synonym_map] - - -def delete_synonym_map(): - # [START delete_synonym_map] - client.delete_synonym_map("test-syn-map") - print("Synonym Map 'test-syn-map' deleted") - # [END delete_synonym_map] - - -if __name__ == "__main__": - create_synonym_map() - get_synonym_maps() - get_synonym_map() - delete_synonym_map() diff --git a/sdk/search/azure-search-documents/samples/sample_utils.py b/sdk/search/azure-search-documents/samples/sample_utils.py new file mode 100644 index 000000000000..fa6abd801068 --- /dev/null +++ b/sdk/search/azure-search-documents/samples/sample_utils.py @@ -0,0 +1,8 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# -------------------------------------------------------------------------- + +AZURE_SEARCH_API_VERSION = "2025-11-01-preview" diff --git a/sdk/search/azure-search-documents/samples/sample_vector_search.py b/sdk/search/azure-search-documents/samples/sample_vector_search.py deleted file mode 100644 index b7bfbc31d44a..000000000000 --- a/sdk/search/azure-search-documents/samples/sample_vector_search.py +++ /dev/null @@ -1,204 +0,0 @@ -# coding: utf-8 - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -""" -FILE: sample_vector_search.py -DESCRIPTION: - This sample demonstrates how to get search results from a basic search text - from an Azure Search index. 
-USAGE: - python sample_vector_search.py - - Set the environment variables with your own values before running the sample: - 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service - 2) AZURE_SEARCH_INDEX_NAME - the name of your search index (e.g. "hotels-sample-index") - 3) AZURE_SEARCH_API_KEY - your search API key -""" - -import os - -from azure.core.credentials import AzureKeyCredential -from azure.search.documents import SearchClient -from azure.search.documents.indexes import SearchIndexClient -from azure.search.documents.models import VectorizedQuery - -service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] -index_name = os.environ["AZURE_SEARCH_INDEX_NAME"] -key = os.environ["AZURE_SEARCH_API_KEY"] - - -def get_embeddings(text: str): - # There are a few ways to get embeddings. This is just one example. - import openai - - open_ai_endpoint = os.getenv("OpenAIEndpoint") - open_ai_key = os.getenv("OpenAIKey") - - client = openai.AzureOpenAI( - azure_endpoint=open_ai_endpoint, - api_key=open_ai_key, - api_version="2023-09-01-preview", - ) - embedding = client.embeddings.create(input=[text], model="text-embedding-ada-002") - return embedding.data[0].embedding - - -def get_hotel_index(name: str): - from azure.search.documents.indexes.models import ( - SearchIndex, - SearchField, - SearchFieldDataType, - SimpleField, - SearchableField, - VectorSearch, - VectorSearchProfile, - HnswAlgorithmConfiguration, - ) - - fields = [ - SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), - SearchableField( - name="hotelName", - type=SearchFieldDataType.String, - sortable=True, - filterable=True, - ), - SearchableField(name="description", type=SearchFieldDataType.String), - SearchField( - name="descriptionVector", - type=SearchFieldDataType.Collection(SearchFieldDataType.Single), - searchable=True, - vector_search_dimensions=1536, - vector_search_profile_name="my-vector-config", - ), - SearchableField( - 
name="category", - type=SearchFieldDataType.String, - sortable=True, - filterable=True, - facetable=True, - ), - ] - vector_search = VectorSearch( - profiles=[VectorSearchProfile(name="my-vector-config", algorithm_configuration_name="my-algorithms-config")], - algorithms=[HnswAlgorithmConfiguration(name="my-algorithms-config")], - ) - return SearchIndex(name=name, fields=fields, vector_search=vector_search) - - -def get_hotel_documents(): - docs = [ - { - "hotelId": "1", - "hotelName": "Fancy Stay", - "description": "Best hotel in town if you like luxury hotels.", - "descriptionVector": get_embeddings("Best hotel in town if you like luxury hotels."), - "category": "Luxury", - }, - { - "hotelId": "2", - "hotelName": "Roach Motel", - "description": "Cheapest hotel in town. Infact, a motel.", - "descriptionVector": get_embeddings("Cheapest hotel in town. Infact, a motel."), - "category": "Budget", - }, - { - "hotelId": "3", - "hotelName": "EconoStay", - "description": "Very popular hotel in town.", - "descriptionVector": get_embeddings("Very popular hotel in town."), - "category": "Budget", - }, - { - "hotelId": "4", - "hotelName": "Modern Stay", - "description": "Modern architecture, very polite staff and very clean. Also very affordable.", - "descriptionVector": get_embeddings( - "Modern architecture, very polite staff and very clean. Also very affordable." - ), - "category": "Luxury", - }, - { - "hotelId": "5", - "hotelName": "Secret Point", - "description": "One of the best hotel in town. The hotel is ideally located on the main commercial artery of the city in the heart of New York.", - "descriptionVector": get_embeddings( - "One of the best hotel in town. The hotel is ideally located on the main commercial artery of the city in the heart of New York." 
- ), - "category": "Boutique", - }, - ] - return docs - - -def single_vector_search(): - # [START single_vector_search] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - results = search_client.search( - vector_queries=[vector_query], - select=["hotelId", "hotelName"], - ) - - for result in results: - print(result) - # [END single_vector_search] - - -def single_vector_search_with_filter(): - # [START single_vector_search_with_filter] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - results = search_client.search( - search_text="", - vector_queries=[vector_query], - filter="category eq 'Luxury'", - select=["hotelId", "hotelName"], - ) - - for result in results: - print(result) - # [END single_vector_search_with_filter] - - -def simple_hybrid_search(): - # [START simple_hybrid_search] - query = "Top hotels in town" - - search_client = SearchClient(service_endpoint, index_name, AzureKeyCredential(key)) - vector_query = VectorizedQuery(vector=get_embeddings(query), k_nearest_neighbors=3, fields="descriptionVector") - - results = search_client.search( - search_text=query, - vector_queries=[vector_query], - select=["hotelId", "hotelName"], - ) - - for result in results: - print(result) - # [END simple_hybrid_search] - - -if __name__ == "__main__": - credential = AzureKeyCredential(key) - index_client = SearchIndexClient(service_endpoint, credential) - index = get_hotel_index(index_name) - index_client.create_index(index) - client = SearchClient(service_endpoint, index_name, credential) - hotel_docs = get_hotel_documents() - client.upload_documents(documents=hotel_docs) - - single_vector_search() - 
single_vector_search_with_filter() - simple_hybrid_search() From 84905e825f0d3b6f940b15c3334f0804454dd8e4 Mon Sep 17 00:00:00 2001 From: aprilk-ms <55356546+aprilk-ms@users.noreply.github.com> Date: Fri, 23 Jan 2026 10:54:31 -0800 Subject: [PATCH 07/18] Added more evaluation sample tests (set 2) (#44812) * Added more eval sample tests * Address PR review comments: async executor consistency, robust timestamp regex * Use specific regex patterns for timestamp sanitization --- sdk/ai/azure-ai-projects/assets.json | 2 +- sdk/ai/azure-ai-projects/tests/conftest.py | 12 +++ .../tests/samples/sample_executor.py | 27 +++++- .../tests/samples/test_samples_evaluations.py | 90 +++++++++++++++++-- 4 files changed, 124 insertions(+), 7 deletions(-) diff --git a/sdk/ai/azure-ai-projects/assets.json b/sdk/ai/azure-ai-projects/assets.json index 9c6015cc80f1..490af2ec735e 100644 --- a/sdk/ai/azure-ai-projects/assets.json +++ b/sdk/ai/azure-ai-projects/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/ai/azure-ai-projects", - "Tag": "python/ai/azure-ai-projects_257daffdb5" + "Tag": "python/ai/azure-ai-projects_6f9985fe6d" } diff --git a/sdk/ai/azure-ai-projects/tests/conftest.py b/sdk/ai/azure-ai-projects/tests/conftest.py index dfae2c1550cb..982d1b3ab7bb 100644 --- a/sdk/ai/azure-ai-projects/tests/conftest.py +++ b/sdk/ai/azure-ai-projects/tests/conftest.py @@ -122,6 +122,18 @@ def sanitize_url_paths(): # Sanitize eval dataset names with timestamps (e.g., eval-data-2026-01-19_040648_UTC) add_general_regex_sanitizer(regex=r"eval-data-\d{4}-\d{2}-\d{2}_\d{6}_UTC", value="eval-data-sanitized-timestamp") + # Sanitize Unix timestamps in eval names (from sample_redteam_evaluations.py) + # Pattern 1: "Red Team Agent Safety Evaluation -" + add_general_regex_sanitizer( + regex=r"Evaluation -\d{10}", + value="Evaluation -SANITIZED-TS" + ) + # Pattern 2: "Eval Run for -" (agent name already sanitized) + 
add_general_regex_sanitizer( + regex=r"sanitized-agent-name -\d{10}", + value="sanitized-agent-name -SANITIZED-TS" + ) + # Sanitize API key from service response (this includes Application Insights connection string) add_body_key_sanitizer(json_path="credentials.key", value="sanitized-api-key") diff --git a/sdk/ai/azure-ai-projects/tests/samples/sample_executor.py b/sdk/ai/azure-ai-projects/tests/samples/sample_executor.py index d23b17295716..a15b9b72b31d 100644 --- a/sdk/ai/azure-ai-projects/tests/samples/sample_executor.py +++ b/sdk/ai/azure-ai-projects/tests/samples/sample_executor.py @@ -11,6 +11,7 @@ import inspect import importlib.util import functools + from dataclasses import dataclass, field from typing import overload, Union, Optional from pydantic import BaseModel @@ -22,7 +23,12 @@ from azure.core.credentials_async import AsyncTokenCredential from devtools_testutils.fake_credentials import FakeTokenCredential from devtools_testutils.fake_credentials_async import AsyncFakeCredential +from devtools_testutils import is_live from azure.ai.projects import AIProjectClient + +# Fixed timestamp for playback mode (Nov 2023). +# Must match the timestamp sanitizers in conftest.py (e.g., `Evaluation -\d{10}`). +PLAYBACK_TIMESTAMP = 1700000000 from pytest import MonkeyPatch from azure.ai.projects.aio import AIProjectClient as AsyncAIProjectClient @@ -312,6 +318,16 @@ def execute(self, patched_open_fn=None): mock.patch("builtins.open", side_effect=patched_open_fn), ): self.spec.loader.exec_module(self.module) + # In playback mode, patch time functions on the module: + # - time.sleep: avoid waiting for polling loops (instant) + # - time.time: return fixed value for deterministic request bodies + # Must be done after exec_module so the module's 'time' reference can be patched. 
+ if not is_live() and hasattr(self.module, "time"): + self.module.time.sleep = lambda _: None + self.module.time.time = lambda: PLAYBACK_TIMESTAMP + # Call main() if it exists (samples wrap their code in main()) + if hasattr(self.module, "main") and callable(self.module.main): + self.module.main() def validate_print_calls_by_llm( self, @@ -379,7 +395,16 @@ async def execute_async(self, patched_open_fn=None): if self.spec.loader is None: raise ImportError(f"Could not load module {self.spec.name} from {self.sample_path}") self.spec.loader.exec_module(self.module) - await self.module.main() + # In playback mode, patch time functions on the module: + # - time.sleep: avoid waiting for polling loops (instant) + # - time.time: return fixed value for deterministic request bodies + # Must be done after exec_module so the module's 'time' reference can be patched. + if not is_live() and hasattr(self.module, "time"): + self.module.time.sleep = lambda _: None + self.module.time.time = lambda: PLAYBACK_TIMESTAMP + # Call main() if it exists (samples wrap their code in main()) + if hasattr(self.module, "main") and callable(self.module.main): + await self.module.main() async def validate_print_calls_by_llm_async( self, diff --git a/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py b/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py index ef671dd22369..acb3c5c2fd04 100644 --- a/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py +++ b/sdk/ai/azure-ai-projects/tests/samples/test_samples_evaluations.py @@ -4,6 +4,7 @@ # Licensed under the MIT License. # ------------------------------------ import functools +import os import pytest from devtools_testutils import recorded_by_proxy, AzureRecordedTestCase, RecordedTransport, EnvironmentVariableLoader from sample_executor import ( @@ -44,7 +45,9 @@ class TestSamplesEvaluations(AzureRecordedTestCase): """ Tests for evaluation samples. 
- Included samples (9): + Included samples (25): + + Main evaluation samples (10): - sample_agent_evaluation.py - sample_model_evaluation.py - sample_agent_response_evaluation.py @@ -54,8 +57,24 @@ class TestSamplesEvaluations(AzureRecordedTestCase): - sample_eval_catalog_code_based_evaluators.py - sample_eval_catalog_prompt_based_evaluators.py - sample_evaluation_compare_insight.py + - sample_redteam_evaluations.py - More samples will be added in the future. + Agentic evaluator samples (15): + - sample_coherence.py + - sample_fluency.py + - sample_groundedness.py + - sample_intent_resolution.py + - sample_relevance.py + - sample_response_completeness.py + - sample_task_adherence.py + - sample_task_completion.py + - sample_task_navigation_efficiency.py + - sample_tool_call_accuracy.py + - sample_tool_call_success.py + - sample_tool_input_accuracy.py + - sample_tool_output_utilization.py + - sample_tool_selection.py + - sample_generic_agentic_evaluator.py Excluded samples and reasons: @@ -76,13 +95,11 @@ class TestSamplesEvaluations(AzureRecordedTestCase): - sample_evaluations_builtin_with_traces.py: Requires Azure Application Insights and uses azure-monitor-query to fetch traces. - sample_scheduled_evaluations.py: Requires Azure RBAC assignment via - azure-mgmt-authorization and azure-mgmt-resource. + azure-mgmt-authorization and azure-mgmt-resource, AND uploads Dataset. Complex prerequisites (require manual portal setup): - sample_continuous_evaluation_rule.py: Requires manual RBAC assignment in Azure Portal to enable continuous evaluation. - - sample_redteam_evaluations.py: Red team evaluations may require special - permissions or setup. 
""" # To run this test with a specific sample, use: @@ -102,6 +119,7 @@ class TestSamplesEvaluations(AzureRecordedTestCase): "sample_eval_catalog_prompt_based_evaluators.py", "sample_evaluation_compare_insight.py", "sample_agent_response_evaluation_with_function_tool.py", + "sample_redteam_evaluations.py", ], ), ) @@ -116,3 +134,65 @@ def test_evaluation_samples(self, sample_path: str, **kwargs) -> None: project_endpoint=kwargs["azure_ai_project_endpoint"], model=kwargs["azure_ai_model_deployment_name"], ) + + # To run this test with a specific sample, use: + # pytest tests/samples/test_samples_evaluations.py::TestSamplesEvaluations::test_agentic_evaluator_samples[sample_coherence] + @evaluationsPreparer() + @pytest.mark.parametrize( + "sample_path", + get_sample_paths( + "evaluations/agentic_evaluators", + samples_to_test=[ + "sample_coherence.py", + "sample_fluency.py", + "sample_groundedness.py", + "sample_intent_resolution.py", + "sample_relevance.py", + "sample_response_completeness.py", + "sample_task_adherence.py", + "sample_task_completion.py", + "sample_task_navigation_efficiency.py", + "sample_tool_call_accuracy.py", + "sample_tool_call_success.py", + "sample_tool_input_accuracy.py", + "sample_tool_output_utilization.py", + "sample_tool_selection.py", + ], + ), + ) + @SamplePathPasser() + @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) + def test_agentic_evaluator_samples(self, sample_path: str, **kwargs) -> None: + env_var_mapping = get_sample_environment_variables_map(kwargs) + executor = SyncSampleExecutor(self, sample_path, env_var_mapping=env_var_mapping, **kwargs) + executor.execute() + executor.validate_print_calls_by_llm( + instructions=evaluations_instructions, + project_endpoint=kwargs["azure_ai_project_endpoint"], + model=kwargs["azure_ai_model_deployment_name"], + ) + + # To run this test, use: + # pytest tests/samples/test_samples_evaluations.py::TestSamplesEvaluations::test_generic_agentic_evaluator_sample + 
@evaluationsPreparer() + @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) + def test_generic_agentic_evaluator_sample(self, **kwargs) -> None: + # Manually construct path to nested sample + current_dir = os.path.dirname(os.path.abspath(__file__)) + samples_folder = os.path.normpath(os.path.join(current_dir, os.pardir, os.pardir)) + sample_path = os.path.join( + samples_folder, + "samples", + "evaluations", + "agentic_evaluators", + "sample_generic_agentic_evaluator", + "sample_generic_agentic_evaluator.py", + ) + env_var_mapping = get_sample_environment_variables_map(kwargs) + executor = SyncSampleExecutor(self, sample_path, env_var_mapping=env_var_mapping, **kwargs) + executor.execute() + executor.validate_print_calls_by_llm( + instructions=evaluations_instructions, + project_endpoint=kwargs["azure_ai_project_endpoint"], + model=kwargs["azure_ai_model_deployment_name"], + ) From 3645e13a6c3fe4d61f18478d1f3da537717c2fcf Mon Sep 17 00:00:00 2001 From: Libba Lawrence Date: Fri, 23 Jan 2026 14:31:26 -0800 Subject: [PATCH 08/18] update disables for next-* (#44828) * update test disables * updates * this * bump --- eng/pylintrc | 2 +- eng/samples_pylintrc | 66 +++++++++++++++++++++ eng/test_pylintrc | 4 +- eng/tools/azure-sdk-tools/azpysdk/pylint.py | 7 ++- eng/tox/run_pylint.py | 7 ++- 5 files changed, 77 insertions(+), 9 deletions(-) create mode 100644 eng/samples_pylintrc diff --git a/eng/pylintrc b/eng/pylintrc index dd3a8b0605b0..ef80940dd924 100644 --- a/eng/pylintrc +++ b/eng/pylintrc @@ -9,7 +9,7 @@ ignore-paths= azure/mixedreality/remoterendering/_api_version.py, (?:.*[/\\]|^).*[/\\](projects|agents)[/\\](models/_models.py|_model_base.py|operations/_operations.py|aio/operations/_operations.py)$, # Exclude any path that contains the following directory names - (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|examples|doc|\.tox)(?:[/\\]|$) + (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|samples|tests|test|examples|doc|\.tox)(?:[/\\]|$) 
load-plugins=pylint_guidelines_checker diff --git a/eng/samples_pylintrc b/eng/samples_pylintrc new file mode 100644 index 000000000000..edab88a322f7 --- /dev/null +++ b/eng/samples_pylintrc @@ -0,0 +1,66 @@ +[MASTER] +py-version=3.10 +ignore-patterns=conftest,setup +reports=no + +# PYLINT DIRECTORY BLACKLIST. +ignore-paths= + azure\\mixedreality\\remoterendering\\_api_version.py, + azure/mixedreality/remoterendering/_api_version.py, + (?:.*[/\\]|^).*[/\\](projects|agents)[/\\](models/_models.py|_model_base.py|operations/_operations.py|aio/operations/_operations.py)$, + # Exclude any path that contains the following directory names + (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|tests|test|examples|doc|\.tox)(?:[/\\]|$) + +load-plugins=pylint_guidelines_checker + +[MESSAGES CONTROL] +# For all codes, run 'pylint --list-msgs' or go to 'https://pylint.pycqa.org/en/latest/technical_reference/features.html' +# locally-disabled: Warning locally suppressed using disable-msg +# cyclic-import: because of https://github.com/PyCQA/pylint/issues/850 +# too-many-arguments: Due to the nature of the CLI many commands have large arguments set which reflect in large arguments set in corresponding methods. 
+# Let's black deal with bad-continuation + +# Added disables from super-with-arguments and do-not-import-asyncio for samples +disable=useless-object-inheritance,missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code,too-few-public-methods,consider-using-f-string,super-with-arguments,redefined-builtin,import-outside-toplevel,client-suffix-needed,unnecessary-dunder-call,unnecessary-ellipsis,client-paging-methods-use-list,consider-using-max-builtin,too-many-lines,possibly-used-before-assignment,do-not-hardcode-dedent,arguments-differ,signature-differs,deprecated-class,too-many-positional-arguments,do-not-import-asyncio + + +[FORMAT] +max-line-length=120 + +[VARIABLES] +# Tells whether we should check for unused import in __init__ files. +init-import=yes + +[DESIGN] +# Maximum number of locals for function / method body +max-locals=25 +# Maximum number of branch for function / method body +max-branches=20 +# Maximum number of instance attributes for class +max-attributes=10 +# Maximum number of ancestors +max-parents=15 + +[SIMILARITIES] +min-similarity-lines=10 + +[BASIC] +# Naming hints based on PEP 8 (https://www.python.org/dev/peps/pep-0008/#naming-conventions). +# Consider these guidelines and not hard rules. Read PEP 8 for more details. + +# The invalid-name checker must be **enabled** for these hints to be used. 
+include-naming-hint=yes + +module-naming-style=snake_case +const-naming-style=UPPER_CASE +class-naming-style=PascalCase +class-attribute-naming-style=snake_case +attr-naming-style=snake_case +method-naming-style=snake_case +function-naming-style=snake_case +argument-naming-style=snake_case +variable-naming-style=snake_case +inlinevar-naming-style=snake_case + +[TYPECHECK] +generated-members=js.* diff --git a/eng/test_pylintrc b/eng/test_pylintrc index b95de95de206..08aade52866a 100644 --- a/eng/test_pylintrc +++ b/eng/test_pylintrc @@ -9,7 +9,7 @@ ignore-paths= azure/mixedreality/remoterendering/_api_version.py, (?:.*[/\\]|^).*[/\\](projects|agents)[/\\](models/_models.py|_model_base.py|operations/_operations.py|aio/operations/_operations.py)$, # Exclude any path that contains the following directory names - (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|examples|doc|\.tox)(?:[/\\]|$) + (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|examples|samples|doc|\.tox)(?:[/\\]|$) load-plugins=pylint_guidelines_checker @@ -21,7 +21,7 @@ load-plugins=pylint_guidelines_checker # Let's black deal with bad-continuation # Added disables from super-with-arguments and client formatting rules for tests 
-disable=useless-object-inheritance,missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code,too-few-public-methods,consider-using-f-string,super-with-arguments,redefined-builtin,import-outside-toplevel,client-suffix-needed,unnecessary-dunder-call,unnecessary-ellipsis,client-paging-methods-use-list,consider-using-max-builtin,too-many-lines,possibly-used-before-assignment,do-not-hardcode-dedent,arguments-differ,signature-differs,deprecated-class,too-many-positional-arguments,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs,unapproved-client-method-name-prefix,client-method-has-more-than-5-positional-arguments,client-method-missing-type-annotations,client-method-missing-kwargs,client-method-name-no-double-underscore,client-method-missing-tracing-decorator,client-method-missing-tracing-decorator-async,client-incorrect-naming-convention,client-docstring-use-literal-include,client-lro-methods-use-polling,lro-methods-use-correct-naming,specify-parameter-names-in-call 
+disable=useless-object-inheritance,missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code,too-few-public-methods,consider-using-f-string,super-with-arguments,redefined-builtin,import-outside-toplevel,client-suffix-needed,unnecessary-dunder-call,unnecessary-ellipsis,client-paging-methods-use-list,consider-using-max-builtin,too-many-lines,possibly-used-before-assignment,do-not-hardcode-dedent,arguments-differ,signature-differs,deprecated-class,too-many-positional-arguments,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs,unapproved-client-method-name-prefix,client-method-has-more-than-5-positional-arguments,client-method-missing-type-annotations,client-method-missing-kwargs,client-method-name-no-double-underscore,client-method-missing-tracing-decorator,client-method-missing-tracing-decorator-async,client-incorrect-naming-convention,client-docstring-use-literal-include,client-lro-methods-use-polling,lro-methods-use-correct-naming,specify-parameter-names-in-call,protected-access,name-too-long,missing-function-docstring,missing-class-docstring,missing-module-docstring,docstring-missing-param,docstring-missing-type,docstring-missing-return,docstring-missing-rtype,docstring-should-be-keyword,docstring-admonition-needs-newline,docstring-keyword-should-match-keyword-only,docstring-type-do-not-use-class,do-not-import-asyncio,config-missing-kwargs-in-policy,client-method-should-not-use-static-method,file-needs-copyright-header,async-client-bad-name,connection-string-should-not-be-constructor-param,package-name-incorrect,naming-mismatch,enum-must-be-uppercase,enum-must-inherit-case-insensitive-enum-meta,client-accepts-api-version-keyword,non-abstract-transport-import,delete-operation-wrong-return-type,networking-import-outside-azure-core-transport,no-raise-with-traceback,no-legacy-azure-core-http-response-import,do-not-import-legacy-six,no-typing-import-in-type-check,do-not-use-legacy-typ
ing,do-not-log-raised-errors,invalid-use-of-overload,do-not-log-exceptions-if-not-debug,do-not-hardcode-connection-verify,missing-user-agent-policy,missing-logging-policy,missing-retry-policy,missing-distributed-tracing-policy,remove-deprecated-iscoroutinefunction,singleton-comparison [FORMAT] diff --git a/eng/tools/azure-sdk-tools/azpysdk/pylint.py b/eng/tools/azure-sdk-tools/azpysdk/pylint.py index 355ae1cf705d..7ec91bb504f3 100644 --- a/eng/tools/azure-sdk-tools/azpysdk/pylint.py +++ b/eng/tools/azure-sdk-tools/azpysdk/pylint.py @@ -177,15 +177,16 @@ def run(self, args: argparse.Namespace) -> int: ) results.append(e.returncode) - # Run samples with main pylintrc + # Run samples with samples_pylintrc if os.path.exists(samples_dir): try: + samples_rcfile = os.path.join(REPO_ROOT, "eng/samples_pylintrc") logger.info( [ executable, "-m", "pylint", - "--rcfile={}".format(rcFileLocation), + "--rcfile={}".format(samples_rcfile), "--output-format=parseable", samples_dir, ] @@ -196,7 +197,7 @@ def run(self, args: argparse.Namespace) -> int: executable, "-m", "pylint", - "--rcfile={}".format(rcFileLocation), + "--rcfile={}".format(samples_rcfile), "--output-format=parseable", samples_dir, ] diff --git a/eng/tox/run_pylint.py b/eng/tox/run_pylint.py index 29ab80dd1849..a7a89bc39c97 100644 --- a/eng/tox/run_pylint.py +++ b/eng/tox/run_pylint.py @@ -108,18 +108,19 @@ ) exit_code = max(exit_code, e.returncode) - # Run samples with main pylintrc + # Run samples with samples_pylintrc logging.info(f"Checking samples directory: {samples_dir}") logging.info(f"Samples directory exists: {os.path.exists(samples_dir)}") if os.path.exists(samples_dir): try: - logging.info(f"Running pylint on samples with config: {rcFileLocation}") + samples_rcfile = os.path.join(root_dir, "eng/samples_pylintrc") + logging.info(f"Running pylint on samples with config: {samples_rcfile}") check_call( [ sys.executable, "-m", "pylint", - "--rcfile={}".format(rcFileLocation), + 
"--rcfile={}".format(samples_rcfile), "--output-format=parseable", samples_dir ] From 7956c88c86649d7e2aca76c5cbae7e57c045ad1a Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Fri, 23 Jan 2026 15:30:46 -0800 Subject: [PATCH 09/18] bump default doc-warden version (#44833) Co-authored-by: Scott Beddall --- eng/common/scripts/Verify-Readme.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/common/scripts/Verify-Readme.ps1 b/eng/common/scripts/Verify-Readme.ps1 index 4e2cd6522497..05779d9aa3dc 100644 --- a/eng/common/scripts/Verify-Readme.ps1 +++ b/eng/common/scripts/Verify-Readme.ps1 @@ -9,7 +9,7 @@ param ( [string]$SettingsPath ) . (Join-Path $PSScriptRoot common.ps1) -$DefaultDocWardenVersion = "0.7.2" +$DefaultDocWardenVersion = "0.7.3" $script:FoundError = $false function Test-Readme-Files { From 60a9d5a49f8b6e9554524445419e02de08fedd2a Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Fri, 23 Jan 2026 15:51:25 -0800 Subject: [PATCH 10/18] Adding new documentation about code customizations for TypeSpec-based Azure SDKs (#44814) --- .../knowledge/customizing-client-tsp.md | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/eng/common/knowledge/customizing-client-tsp.md b/eng/common/knowledge/customizing-client-tsp.md index 04ec330d38da..e9a4520541b1 100644 --- a/eng/common/knowledge/customizing-client-tsp.md +++ b/eng/common/knowledge/customizing-client-tsp.md @@ -473,3 +473,38 @@ interface MyClient { ``` This reference provides the essential patterns and decorators for TypeSpec client customizations. Focus on the core decorators (`@client`, `@operationGroup`, `@@clientLocation`, `@@clientName`, `@@access`) for most scenarios, and use advanced features selectively. 
+--- + +## When TypeSpec Isn't Enough: Code Customizations + +TypeSpec customizations (`client.tsp`) should be your **first choice** for SDK customization - they're clean, documented, and survive regeneration. + +For scenarios that TypeSpec cannot express, each language has its own post-generation code customization approach. Refer to the language-specific documentation below. + +### Language-Specific Code Customization Guides + +When you need post-generation customizations, refer to the language-specific documentation: + +| Language | Documentation | Pattern | +|----------|--------------|---------| +| **C#** | [C# Customization Guide](https://github.com/microsoft/typespec/blob/main/packages/http-client-csharp/.tspd/docs/customization.md) | Partial classes with `[CodeGenType]`, `[CodeGenMember]`, `[CodeGenSerialization]` attributes | +| **Python** | [Python Customization Guide](https://github.com/Azure/autorest.python/blob/main/docs/customizations.md) | `_patch.py` files at models, operations, and client levels | +| **Java** | [Java Customization Guide](https://github.com/Azure/autorest.java/blob/main/customization-base/README.md) | `Customization` class with `customizeAst()` method | +| **JavaScript** | [JS Customization Guide](https://github.com/Azure/azure-sdk-for-js/wiki/Modular-(DPG)-Customization-Guide) | Copy `src/` to `generated/`, add customizations in `src/` | +| **Go** | [Go Customization Guide](https://github.com/Azure/azure-sdk-for-go/blob/main/documentation/development/generate.md) | Prefer TypeSpec; use custom wrapper files for advanced cases | + +### Decision Flow + +``` +Need to customize SDK? + │ + ā–¼ + Can it be done in TypeSpec? 
+ │ + ā”Œā”€ā”€ā”€ā”€ā”“ā”€ā”€ā”€ā”€ā” + Yes No + │ │ + ā–¼ ā–¼ +Use client.tsp Use code customization +decorators (see language guide above) +``` \ No newline at end of file From ddee8a1cdb6911af82efbc46ab0999fb8641c11e Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Fri, 23 Jan 2026 18:33:48 -0800 Subject: [PATCH 11/18] Bump tar (#44838) Bumps the npm_and_yarn group with 1 update in the /eng/common/tsp-client directory: [tar](https://github.com/isaacs/node-tar). Updates `tar` from 7.4.3 to 7.5.3 - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v7.4.3...v7.5.3) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- eng/common/tsp-client/package-lock.json | 143 ++++++++++++++++++------ 1 file changed, 107 insertions(+), 36 deletions(-) diff --git a/eng/common/tsp-client/package-lock.json b/eng/common/tsp-client/package-lock.json index 2f8ccd4ab07e..129568b26b71 100644 --- a/eng/common/tsp-client/package-lock.json +++ b/eng/common/tsp-client/package-lock.json @@ -332,6 +332,7 @@ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "license": "MIT", + "peer": true, "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", @@ -346,6 +347,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "license": "MIT", + "peer": true, "engines": { "node": ">=6.9.0" } @@ -355,6 +357,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.2.2.tgz", "integrity": 
"sha512-E+KExNurKcUJJdxmjglTl141EwxWyAHplvsYJQgSwXf8qiNWkTxTuCCqmhFEmbIXd4zLaGMfQFJ6WrZ7fSeV3g==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/figures": "^1.0.13", @@ -379,6 +382,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.16.tgz", "integrity": "sha512-j1a5VstaK5KQy8Mu8cHmuQvN1Zc62TbLhjJxwHvKPPKEoowSF6h/0UdOpA9DNdWZ+9Inq73+puRq1df6OJ8Sag==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8" @@ -400,6 +404,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.2.0.tgz", "integrity": "sha512-NyDSjPqhSvpZEMZrLCYUquWNl+XC/moEcVFqS55IEYIYsY0a1cUCevSqk7ctOlnm/RaSBU5psFryNlxcmGrjaA==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/figures": "^1.0.13", "@inquirer/type": "^3.0.8", @@ -427,6 +432,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.18.tgz", "integrity": "sha512-yeQN3AXjCm7+Hmq5L6Dm2wEDeBRdAZuyZ4I7tWSSanbxDzqM0KqzoDbKM7p4ebllAYdoQuPJS6N71/3L281i6w==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/external-editor": "^1.0.1", @@ -449,6 +455,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.18.tgz", "integrity": "sha512-xUjteYtavH7HwDMzq4Cn2X4Qsh5NozoDHCJTdoXg9HfZ4w3R6mxV1B9tL7DGJX2eq/zqtsFjhm0/RJIMGlh3ag==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8", @@ -471,6 +478,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.1.tgz", "integrity": "sha512-Oau4yL24d2B5IL4ma4UpbQigkVhzPDXLoqy1ggK4gnHg/stmkffJE4oOXHXF3uz0UEpywG68KcyXsyYpA1Re/Q==", "license": "MIT", + "peer": true, "dependencies": { "chardet": "^2.1.0", "iconv-lite": "^0.6.3" @@ -492,6 +500,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.13.tgz", "integrity": 
"sha512-lGPVU3yO9ZNqA7vTYz26jny41lE7yoQansmqdMLBEfqaGsmdg7V3W9mK9Pvb5IL4EVZ9GnSDGMO/cJXud5dMaw==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" } @@ -501,6 +510,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.2.2.tgz", "integrity": "sha512-hqOvBZj/MhQCpHUuD3MVq18SSoDNHy7wEnQ8mtvs71K8OPZVXJinOzcvQna33dNYLYE4LkA9BlhAhK6MJcsVbw==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8" @@ -522,6 +532,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.18.tgz", "integrity": "sha512-7exgBm52WXZRczsydCVftozFTrrwbG5ySE0GqUd2zLNSBXyIucs2Wnm7ZKLe/aUu6NUg9dg7Q80QIHCdZJiY4A==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8" @@ -543,6 +554,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.18.tgz", "integrity": "sha512-zXvzAGxPQTNk/SbT3carAD4Iqi6A2JS2qtcqQjsL22uvD+JfQzUrDEtPjLL7PLn8zlSNyPdY02IiQjzoL9TStA==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8", @@ -565,6 +577,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.8.4.tgz", "integrity": "sha512-MuxVZ1en1g5oGamXV3DWP89GEkdD54alcfhHd7InUW5BifAdKQEK9SLFa/5hlWbvuhMPlobF0WAx7Okq988Jxg==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/checkbox": "^4.2.2", "@inquirer/confirm": "^5.1.16", @@ -594,6 +607,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.6.tgz", "integrity": "sha512-KOZqa3QNr3f0pMnufzL7K+nweFFCCBs6LCXZzXDrVGTyssjLeudn5ySktZYv1XiSqobyHRYYK0c6QsOxJEhXKA==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/type": "^3.0.8", @@ -616,6 +630,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.1.1.tgz", "integrity": "sha512-TkMUY+A2p2EYVY3GCTItYGvqT6LiLzHBnqsU1rJbrpXUijFfM6zvUx0R4civofVwFCmJZcKqOVwwWAjplKkhxA==", "license": "MIT", 
+ "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/figures": "^1.0.13", @@ -639,6 +654,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.3.2.tgz", "integrity": "sha512-nwous24r31M+WyDEHV+qckXkepvihxhnyIaod2MG7eCE6G0Zm/HUF6jgN8GXgf4U7AU6SLseKdanY195cwvU6w==", "license": "MIT", + "peer": true, "dependencies": { "@inquirer/core": "^10.2.0", "@inquirer/figures": "^1.0.13", @@ -663,6 +679,7 @@ "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.8.tgz", "integrity": "sha512-lg9Whz8onIHRthWaN1Q9EGLa/0LFJjyM8mEUbL1eTi6yMGvBf8gvyDLtxSXztQsxMvhxxNpJYrwa1YHdq+w4Jw==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -680,6 +697,7 @@ "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", "license": "ISC", + "peer": true, "dependencies": { "minipass": "^7.0.4" }, @@ -707,6 +725,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "license": "MIT", + "peer": true, "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -720,6 +739,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "license": "MIT", + "peer": true, "engines": { "node": ">= 8" } @@ -729,6 +749,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "license": "MIT", + "peer": true, "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -742,6 +763,7 @@ "resolved": 
"https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -802,6 +824,7 @@ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -814,6 +837,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -826,6 +850,7 @@ "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", "license": "ISC", + "peer": true, "dependencies": { "string-width": "^7.2.0", "strip-ansi": "^7.1.0", @@ -839,13 +864,15 @@ "version": "10.5.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.5.0.tgz", "integrity": "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/@typespec/compiler/node_modules/prettier": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", "license": "MIT", + "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -861,6 +888,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "license": "MIT", + "peer": true, 
"dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", @@ -878,6 +906,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "license": "MIT", + "peer": true, "dependencies": { "ansi-regex": "^6.0.1" }, @@ -893,6 +922,7 @@ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", "license": "MIT", + "peer": true, "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", @@ -910,6 +940,7 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz", "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", "license": "MIT", + "peer": true, "dependencies": { "cliui": "^9.0.1", "escalade": "^3.1.1", @@ -927,6 +958,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz", "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==", "license": "ISC", + "peer": true, "engines": { "node": "^20.19.0 || ^22.12.0 || >=23" } @@ -1098,6 +1130,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "license": "MIT", + "peer": true, "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", @@ -1114,6 +1147,7 @@ "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "license": "MIT", + "peer": true, "dependencies": { "type-fest": "^0.21.3" }, @@ -1172,6 +1206,7 @@ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "license": "MIT", + "peer": true, "dependencies": { "fill-range": "^7.1.1" }, @@ -1213,13 +1248,15 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz", "integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", "license": "BlueOak-1.0.0", + "peer": true, "engines": { "node": ">=18" } @@ -1229,6 +1266,7 @@ "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", "license": "ISC", + "peer": true, "engines": { "node": ">= 12" } @@ -1343,6 +1381,7 @@ "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", "license": "MIT", + "peer": true, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, @@ -1363,13 +1402,15 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/fast-glob": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", "license": "MIT", + "peer": true, "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -1395,13 +1436,15 
@@ "url": "https://opencollective.com/fastify" } ], - "license": "BSD-3-Clause" + "license": "BSD-3-Clause", + "peer": true }, "node_modules/fastq": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", "license": "ISC", + "peer": true, "dependencies": { "reusify": "^1.0.4" } @@ -1411,6 +1454,7 @@ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "license": "MIT", + "peer": true, "dependencies": { "to-regex-range": "^5.0.1" }, @@ -1432,6 +1476,7 @@ "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -1444,6 +1489,7 @@ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "license": "ISC", + "peer": true, "dependencies": { "is-glob": "^4.0.1" }, @@ -1456,6 +1502,7 @@ "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", "license": "MIT", + "peer": true, "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", "fast-glob": "^3.3.3", @@ -1508,6 +1555,7 @@ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "license": "MIT", + "peer": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" }, @@ -1520,6 +1568,7 @@ "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", "license": "MIT", + "peer": true, "engines": { "node": ">= 4" } @@ -1529,6 +1578,7 @@ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -1547,6 +1597,7 @@ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "license": "MIT", + "peer": true, "dependencies": { "is-extglob": "^2.1.1" }, @@ -1559,6 +1610,7 @@ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.12.0" } @@ -1568,6 +1620,7 @@ "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -1579,7 +1632,8 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/js-yaml": { "version": "4.1.1", @@ -1597,7 +1651,8 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "license": "MIT" + "license": "MIT", + "peer": true 
}, "node_modules/lodash": { "version": "4.17.21", @@ -1610,6 +1665,7 @@ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "license": "MIT", + "peer": true, "engines": { "node": ">= 8" } @@ -1619,6 +1675,7 @@ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "license": "MIT", + "peer": true, "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -1632,15 +1689,17 @@ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", "license": "ISC", + "peer": true, "engines": { "node": ">=16 || 14 >=14.17" } }, "node_modules/minizlib": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", - "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "license": "MIT", + "peer": true, "dependencies": { "minipass": "^7.1.2" }, @@ -1648,21 +1707,6 @@ "node": ">= 18" } }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -1674,6 
+1718,7 @@ "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", "license": "MIT", + "peer": true, "bin": { "mustache": "bin/mustache" } @@ -1683,6 +1728,7 @@ "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz", "integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==", "license": "ISC", + "peer": true, "engines": { "node": "^18.17.0 || >=20.5.0" } @@ -1692,6 +1738,7 @@ "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -1703,13 +1750,15 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" + "license": "ISC", + "peer": true }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "license": "MIT", + "peer": true, "engines": { "node": ">=8.6" }, @@ -1781,7 +1830,8 @@ "url": "https://feross.org/support" } ], - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/require-directory": { "version": "2.1.1", @@ -1797,6 +1847,7 @@ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -1815,6 +1866,7 @@ "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", "integrity": 
"sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", "license": "MIT", + "peer": true, "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" @@ -1839,6 +1891,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "queue-microtask": "^1.2.2" } @@ -1847,7 +1900,8 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/semver": { "version": "7.7.2", @@ -1866,6 +1920,7 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", "license": "ISC", + "peer": true, "engines": { "node": ">=14" }, @@ -1893,6 +1948,7 @@ "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", "license": "MIT", + "peer": true, "engines": { "node": ">=14.16" }, @@ -1960,16 +2016,16 @@ "license": "MIT" }, "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", - "license": "ISC", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.3.tgz", + "integrity": "sha512-ENg5JUHUm2rDD7IvKNFGzyElLXNjachNLp6RaGf4+JOgxXHkqA+gq81ZAMCUmtMtqBsoU62lcp6S27g1LCYGGQ==", + "license": "BlueOak-1.0.0", + "peer": true, "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", "yallist": "^5.0.0" }, "engines": { @@ -1981,6 +2037,7 @@ "resolved": 
"https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.3.0.tgz", "integrity": "sha512-qNsTkX9K8hi+FHDfHmf22e/OGuXmfBm9RqNismxBrnSmZVJKegQ+HYYXT+R7Ha8F/YSm2Y34vmzD4cxMu2u95g==", "license": "MIT", + "peer": true, "dependencies": { "temporal-spec": "0.3.0" } @@ -1989,7 +2046,8 @@ "version": "0.3.0", "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.3.0.tgz", "integrity": "sha512-n+noVpIqz4hYgFSMOSiINNOUOMFtV5cZQNCmmszA6GiVFVRt3G7AqVyhXjhCSmowvQn+NsGn+jMDMKJYHd3bSQ==", - "license": "ISC" + "license": "ISC", + "peer": true }, "node_modules/title-case": { "version": "3.0.3", @@ -2005,6 +2063,7 @@ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "license": "MIT", + "peer": true, "dependencies": { "is-number": "^7.0.0" }, @@ -2023,6 +2082,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "license": "(MIT OR CC0-1.0)", + "peer": true, "engines": { "node": ">=10" }, @@ -2035,6 +2095,7 @@ "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -2056,6 +2117,7 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", "license": "MIT", + "peer": true, "dependencies": { "vscode-languageserver-protocol": "3.17.5" }, @@ -2068,6 +2130,7 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", "integrity": 
"sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", "license": "MIT", + "peer": true, "dependencies": { "vscode-jsonrpc": "8.2.0", "vscode-languageserver-types": "3.17.5" @@ -2078,6 +2141,7 @@ "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", "license": "MIT", + "peer": true, "engines": { "node": ">=14.0.0" } @@ -2086,19 +2150,22 @@ "version": "1.0.12", "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/vscode-languageserver-types": { "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "license": "MIT", + "peer": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -2113,6 +2180,7 @@ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=8" } @@ -2122,6 +2190,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "license": "MIT", + "peer": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -2143,6 +2212,7 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", "license": "BlueOak-1.0.0", + "peer": true, "engines": { "node": ">=18" } @@ -2191,6 +2261,7 @@ "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz", "integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==", "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, From 41022bf72717ca7bd3b5ea4845099d8fbd4aca89 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Mon, 26 Jan 2026 09:47:03 -0800 Subject: [PATCH 12/18] Update @azure-tools/openai-typespec to latest version (1.7.1) (#44839) --- eng/emitter-package-lock.json | 28 +++++++++++++++++++++------- eng/emitter-package.json | 2 +- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/eng/emitter-package-lock.json b/eng/emitter-package-lock.json index 184ea56e81bd..ffc7c817f182 100644 --- a/eng/emitter-package-lock.json +++ b/eng/emitter-package-lock.json @@ -9,7 +9,7 @@ "@azure-tools/typespec-python": "0.58.0" }, "devDependencies": { - "@azure-tools/openai-typespec": "1.6.2", + "@azure-tools/openai-typespec": "1.7.1", "@azure-tools/typespec-autorest": "~0.64.0", "@azure-tools/typespec-azure-core": "~0.64.0", "@azure-tools/typespec-azure-resource-manager": "~0.64.0", @@ -28,9 +28,9 @@ } }, "node_modules/@azure-tools/openai-typespec": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@azure-tools/openai-typespec/-/openai-typespec-1.6.2.tgz", - "integrity": "sha512-u2Y8ZS6OMO9lUbSyYgBiDuGQkF/ccv9qk6ipvDwu+vZmxRx2c9Zp2VOcC6dxWNOT62DWjlfmbJQy7E/fNaQVrw==", + "version": "1.7.1", + 
"resolved": "https://registry.npmjs.org/@azure-tools/openai-typespec/-/openai-typespec-1.7.1.tgz", + "integrity": "sha512-DoZLbQPgHyWKbIiUkCz2/9kKxWnZ/fZwvKGnjo3AJkZ8Si3QOHXR3JBp9LWiErsWPujg6k7NkTocqBXxih8pqQ==", "dev": true, "license": "MIT", "peerDependencies": { @@ -43,6 +43,7 @@ "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.64.0.tgz", "integrity": "sha512-zC2e3px+BqGJvE9DeW00S0PZmkydorB3Hm6Fb2vlJUdmHuTTSochPiZFJF7LHNsAL8sDu7azSHzypESFdN0FmA==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -68,6 +69,7 @@ "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.64.0.tgz", "integrity": "sha512-BXiHc5oayhMsG1dHFU1aFK/ZQX2Gl0dKB0FAFceapaFV9093J2obbsdhIDR3Tl0qei9g3Ha+iWKZ4KgnLdhv4w==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -82,6 +84,7 @@ "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.64.0.tgz", "integrity": "sha512-1HwGo3Nt8ksafoPp1rFOopSzgh68SFsyVNCauzjO8ftf0fEqhRXo70OaGwP6wmTZJsLnW7u1DbrBNu6b0z2sOQ==", "license": "MIT", + "peer": true, "dependencies": { "change-case": "~5.4.4", "pluralize": "^8.0.0" @@ -103,6 +106,7 @@ "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.64.0.tgz", "integrity": "sha512-CvK5iolfsm8oAUZ5wegGVYp4Vvw2rwQa+rcUVoJkwi9c6QwEr+qT6/S4hIntuzEPLxybJSb/ZIWU9Qx3cDrzXg==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -114,10 +118,11 @@ } }, "node_modules/@azure-tools/typespec-client-generator-core": { - "version": "0.64.2", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.64.2.tgz", - "integrity": "sha512-1y5YNnMWQXQjjinmAINP9BpV8U5KBdgw/sqFZbtaoJ/gOQynG8TR1xQGgVbkg/sfiMlTYSiL8ru1efO7vHVuMA==", + "version": "0.64.3", + "resolved": 
"https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.64.3.tgz", + "integrity": "sha512-CnwoynIZD2+0c1/ZgupXCJHst9OLczzFcaiVIYOSK3V/fVkGRvip/hg4i0/9spBCwZVbv10YpNpIqbB2zGilMg==", "license": "MIT", + "peer": true, "dependencies": { "change-case": "~5.4.4", "pluralize": "^8.0.0", @@ -1009,6 +1014,7 @@ "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-1.8.0.tgz", "integrity": "sha512-FeLb7Q0z6Bh5dDpqtnU2RlWiIWWWF7rujx2xGMta5dcTuIOZ4jbdyz1hVdxk4iM4qadvaSV4ey/qrSuffNoh3w==", "license": "MIT", + "peer": true, "dependencies": { "@babel/code-frame": "~7.27.1", "@inquirer/prompts": "^8.0.1", @@ -1053,6 +1059,7 @@ "resolved": "https://registry.npmjs.org/@typespec/events/-/events-0.78.0.tgz", "integrity": "sha512-gSI4rAexxfYyZX0ZqYNRWQyuMb1UeakjAjOeh/2ntmxWCdYc+wSbJjxrxIArsZC+LwzTxq5WpdtD7+7OWzG4yw==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1065,6 +1072,7 @@ "resolved": "https://registry.npmjs.org/@typespec/http/-/http-1.8.0.tgz", "integrity": "sha512-ZKa4RISabwL8cUAmE3BkoNmtCYRjerO0+1Ba6XdDJKG+vJC5EGM2hkDf+ZmYsYZgrX0cvbhPXUKKh28zBV60hw==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1116,6 +1124,7 @@ "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-1.8.0.tgz", "integrity": "sha512-v+RIJpx7vALBSGQmnUWemvXjnrk50HAVqJeg0RbaF3VUnh66Z4itsoNJJmIIc+HmBJng8Ie0V7xv3l02ek6HWA==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1129,6 +1138,7 @@ "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.78.0.tgz", "integrity": "sha512-1clnDw1JbBvjLcfFvEvHdIrnsQuQI5/Cl6mRIrzWWX0pKJ+R89rCdZD1KpidEXw4B4qscD48LsssyrEIFLtuPg==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1142,6 +1152,7 @@ "resolved": "https://registry.npmjs.org/@typespec/sse/-/sse-0.78.0.tgz", "integrity": "sha512-jPARl+e1e/nsDW/1uVsGTzvKmjqezVMyUa13igXxk5nV2ScMdFpH1HhBwTmAhUeaZgY3J81dFHNUnIY67HCrmw==", 
"license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1157,6 +1168,7 @@ "resolved": "https://registry.npmjs.org/@typespec/streams/-/streams-0.78.0.tgz", "integrity": "sha512-wzh5bVdzh+K+pFQFs/EZkVsTH5TQGi12XwhjxJS0UKRwaW2UwSZeY1HqX07oMMPdYESTbjgMrXcxtn89AlzjvQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1169,6 +1181,7 @@ "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.78.0.tgz", "integrity": "sha512-I14X6+IMd0wFMNI8oMFSeFBi2nD4idub+geSO34vuCs4rwuEj3FNzy+rkNkDDvf0+gIUGxeyg7s+YDUcNyiqOA==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, @@ -1181,6 +1194,7 @@ "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.78.0.tgz", "integrity": "sha512-KSDhJX6A/Onsu9FKVZtR/xSy5va3k0y9/U4eiZUn91V/LQyMZNwmResPDHEVYk6JqaIH8bbd6ANWPu3nMd7mmw==", "license": "MIT", + "peer": true, "engines": { "node": ">=20.0.0" }, diff --git a/eng/emitter-package.json b/eng/emitter-package.json index 6e519f56094c..7ef172de5040 100644 --- a/eng/emitter-package.json +++ b/eng/emitter-package.json @@ -13,7 +13,7 @@ "@typespec/sse": "~0.78.0", "@typespec/streams": "~0.78.0", "@typespec/xml": "~0.78.0", - "@azure-tools/openai-typespec": "1.6.2", + "@azure-tools/openai-typespec": "1.7.1", "@azure-tools/typespec-autorest": "~0.64.0", "@azure-tools/typespec-azure-core": "~0.64.0", "@azure-tools/typespec-azure-resource-manager": "~0.64.0", From f0a18827145393942a50e55cafe14260b966c36f Mon Sep 17 00:00:00 2001 From: Matthew Metcalf Date: Mon, 26 Jan 2026 10:55:16 -0800 Subject: [PATCH 13/18] App Config 1.8.0 Release (#44847) * Update CHANGELOG.md * 1.8.0 * Apply suggestion from @jimmyca15 Co-authored-by: Jimmy Campbell * Update CHANGELOG.md --------- Co-authored-by: Jimmy Campbell --- .../azure-appconfiguration/CHANGELOG.md | 10 +++------- .../azure/appconfiguration/_version.py | 2 +- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git 
a/sdk/appconfiguration/azure-appconfiguration/CHANGELOG.md b/sdk/appconfiguration/azure-appconfiguration/CHANGELOG.md index 6cfdb3fe2ee3..7af08470ff7a 100644 --- a/sdk/appconfiguration/azure-appconfiguration/CHANGELOG.md +++ b/sdk/appconfiguration/azure-appconfiguration/CHANGELOG.md @@ -1,17 +1,13 @@ # Release History -## 1.7.3 (Unreleased) +## 1.8.0 (2026-01-26) ### Features Added - Fixed AudiencePolicy to correctly handle AAD audience errors and return ClientAuthenticationError as expected. -- Added `match_conditions` parameter to `by_page()` method in `list_configuration_settings()` to efficiently monitor configuration changes using etags without fetching unchanged data. +- Added a `match_conditions` parameter to the `by_page()` method exposed by the page iterator returned by `list_configuration_settings()` to efficiently monitor configuration changes using etags without fetching unchanged data. - Added query parameter normalization to support Azure Front Door as a CDN. Query parameter keys are now converted to lowercase and sorted alphabetically. -- Added support for custom authentication audiences via the `audience` keyword argument in `AzureAppConfigurationClient` constructor to enable authentication against sovereign clouds. - -### Breaking Changes - -### Bugs Fixed +- Added support for providing Entra ID authentication audiences via the `audience` keyword argument in the `AzureAppConfigurationClient` constructor to enable authentication against sovereign clouds. ### Other Changes diff --git a/sdk/appconfiguration/azure-appconfiguration/azure/appconfiguration/_version.py b/sdk/appconfiguration/azure-appconfiguration/azure/appconfiguration/_version.py index 42102d254f57..ebc49010a604 100644 --- a/sdk/appconfiguration/azure-appconfiguration/azure/appconfiguration/_version.py +++ b/sdk/appconfiguration/azure-appconfiguration/azure/appconfiguration/_version.py @@ -3,4 +3,4 @@ # Licensed under the MIT License. 
# ------------------------------------ -VERSION = "1.7.3" +VERSION = "1.8.0" From afbdce0505d713a2702555323308eafafceb373e Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Mon, 26 Jan 2026 11:17:45 -0800 Subject: [PATCH 14/18] Python SDK emitted from TypeSpec branch `feature/foundry-v2-spec-folder-structure` (#44580) --- sdk/ai/azure-ai-projects/CHANGELOG.md | 17 + sdk/ai/azure-ai-projects/README.md | 32 +- sdk/ai/azure-ai-projects/_tsp-location.yaml | 6 +- .../azure-ai-projects/apiview-properties.json | 347 +- .../azure/ai/projects/_types.py | 1 + .../azure/ai/projects/_utils/model_base.py | 139 +- .../azure/ai/projects/_utils/serialization.py | 17 +- .../ai/projects/aio/operations/_operations.py | 408 +- .../aio/operations/_patch_memories_async.py | 16 +- .../azure/ai/projects/models/__init__.py | 674 +- .../azure/ai/projects/models/_enums.py | 504 +- .../azure/ai/projects/models/_models.py | 13401 +++++++--------- .../azure/ai/projects/models/_patch.py | 3 +- .../ai/projects/operations/_operations.py | 446 +- .../ai/projects/operations/_patch_memories.py | 16 +- .../telemetry/_ai_project_instrumentor.py | 5 + sdk/ai/azure-ai-projects/cspell.json | 54 +- .../azure-ai-projects/post-emitter-fixes.cmd | 9 +- sdk/ai/azure-ai-projects/pyproject.toml | 3 +- sdk/ai/azure-ai-projects/pyrightconfig.json | 2 - .../agents/sample_agent_structured_output.py | 6 +- .../sample_agent_structured_output_async.py | 6 +- .../agents/sample_workflow_multi_agent.py | 8 +- .../sample_workflow_multi_agent_async.py | 8 +- .../agents/tools/sample_agent_ai_search.py | 6 +- .../tools/sample_agent_bing_custom_search.py | 6 +- .../tools/sample_agent_bing_grounding.py | 8 +- .../tools/sample_agent_browser_automation.py | 6 +- .../tools/sample_agent_code_interpreter.py | 4 +- .../sample_agent_code_interpreter_async.py | 4 +- .../agents/tools/sample_agent_fabric.py | 6 +- .../sample_agent_image_generation_async.py | 1 + 
.../tools/sample_agent_memory_search.py | 4 +- .../tools/sample_agent_memory_search_async.py | 4 +- .../agents/tools/sample_agent_openapi.py | 6 +- ...e_agent_openapi_with_project_connection.py | 6 +- .../agents/tools/sample_agent_sharepoint.py | 6 +- .../agents/tools/sample_agent_to_agent.py | 7 +- .../memories/sample_memory_advanced.py | 19 +- .../memories/sample_memory_advanced_async.py | 19 +- .../samples/memories/sample_memory_basic.py | 10 +- .../memories/sample_memory_basic_async.py | 10 +- .../telemetry/test_ai_agents_instrumentor.py | 18 +- .../test_ai_agents_instrumentor_async.py | 18 +- .../telemetry/test_responses_instrumentor.py | 6 +- .../tests/agents/test_agent_responses_crud.py | 6 +- .../agents/test_agent_responses_crud_async.py | 4 +- .../agents/test_conversation_items_crud.py | 34 +- .../test_conversation_items_crud_async.py | 35 +- ...est_agent_code_interpreter_and_function.py | 11 +- ..._agent_file_search_and_code_interpreter.py | 11 +- ...t_file_search_code_interpreter_function.py | 4 +- .../agents/tools/test_agent_ai_search.py | 6 +- .../tools/test_agent_ai_search_async.py | 6 +- .../agents/tools/test_agent_bing_grounding.py | 8 +- .../tools/test_agent_code_interpreter.py | 6 +- .../test_agent_code_interpreter_async.py | 4 +- .../agents/tools/test_agent_memory_search.py | 8 +- .../tools/test_agent_memory_search_async.py | 4 +- .../tests/agents/tools/test_agent_openapi.py | 8 +- .../agents/tools/test_agent_openapi_async.py | 4 +- .../test_agent_tools_with_conversations.py | 7 +- sdk/ai/azure-ai-projects/tests/conftest.py | 10 +- .../tests/finetuning/test_finetuning.py | 1 + .../tests/finetuning/test_finetuning_async.py | 1 + sdk/ai/azure-ai-projects/tests/test_base.py | 20 +- sdk/ai/azure-ai-projects/tsp-location.yaml | 4 - 67 files changed, 7561 insertions(+), 8943 deletions(-) delete mode 100644 sdk/ai/azure-ai-projects/tsp-location.yaml diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 
4c9eee2ce54d..08840fce6a97 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -3,8 +3,25 @@ ## 2.0.0b4 (Unreleased) ### Features Added + * Tracing: included agent ID in response generation traces when available. +### Breaking Changes + +* To align with OpenAI naming conventions, use "Tool" suffix for class names describing Azure tools that are generally available (stable release): + * Rename class `AzureAISearchAgentTool` to `AzureAISearchTool`. + * Rename class `AzureFunctionAgentTool` to `AzureFunctionTool`. + * Rename class `BingGroundingAgentTool` to `BingGroundingTool`. + * Rename class `OpenApiAgentTool` to `OpenApiTool`. +* To align with OpenAI naming conventions, use "PreviewTool" suffix for class names describing Azure tools in preview: + * Rename class `A2ATool` to `A2APreviewTool`. + * Rename class `BingCustomSearchAgentTool` to `BingCustomSearchPreviewTool`. + * Rename class `BrowserAutomationAgentTool` to `BrowserAutomationPreviewTool`. + * Rename class `MemorySearchTool` to `MemorySearchPreviewTool`. + * Rename class `MicrosoftFabricAgentTool` to `MicrosoftFabricPreviewTool`. + * Rename class `SharepointAgentTool` to `SharepointPreviewTool`. +* Rename class `ItemParam` to `InputItem`. 
+ ## 2.0.0b3 (2026-01-06) ### Features Added diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index ef0f9332d6eb..f6c858708b72 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -218,7 +218,7 @@ asset_file_path = os.path.abspath( # Upload the CSV file for the code interpreter file = openai_client.files.create(purpose="assistants", file=open(asset_file_path, "rb")) -tool = CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[file.id])) +tool = CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[file.id])) ``` @@ -348,7 +348,7 @@ Call external APIs defined by OpenAPI specifications without additional client-s with open(weather_asset_file_path, "r") as f: openapi_weather = jsonref.loads(f.read()) -tool = OpenApiAgentTool( +tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="get_weather", spec=openapi_weather, @@ -403,7 +403,7 @@ See the full sample in file `\agents\tools\sample_agent_function_tool.py` in the # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" - tool = MemorySearchTool( + tool = MemorySearchPreviewTool( memory_store_name=memory_store.name, scope=scope, update_delay=1, # Wait 1 second of inactivity before updating memories @@ -427,7 +427,7 @@ Integrate with Azure AI Search indexes for powerful knowledge retrieval and sema ```python -tool = AzureAISearchAgentTool( +tool = AzureAISearchTool( azure_ai_search=AzureAISearchToolResource( indexes=[ AISearchIndexResource( @@ -451,7 +451,7 @@ Ground agent responses with real-time web search results from Bing to provide up ```python -tool = BingGroundingAgentTool( +tool = BingGroundingTool( bing_grounding=BingGroundingSearchToolParameters( search_configurations=[ BingGroundingSearchConfiguration(project_connection_id=os.environ["BING_PROJECT_CONNECTION_ID"]) @@ -471,7 +471,7 @@ Use custom-configured Bing search instances for domain-specific or 
filtered web ```python -tool = BingCustomSearchAgentTool( +tool = BingCustomSearchPreviewTool( bing_custom_search_preview=BingCustomSearchToolParameters( search_configurations=[ BingCustomSearchConfiguration( @@ -494,7 +494,7 @@ Connect to and query Microsoft Fabric: ```python -tool = MicrosoftFabricAgentTool( +tool = MicrosoftFabricPreviewTool( fabric_dataagent_preview=FabricDataAgentToolParameters( project_connections=[ ToolProjectConnection(project_connection_id=os.environ["FABRIC_PROJECT_CONNECTION_ID"]) @@ -514,7 +514,7 @@ Access and search SharePoint documents, lists, and sites for enterprise knowledg ```python -tool = SharepointAgentTool( +tool = SharepointPreviewTool( sharepoint_grounding_preview=SharepointGroundingToolParameters( project_connections=[ ToolProjectConnection(project_connection_id=os.environ["SHAREPOINT_PROJECT_CONNECTION_ID"]) @@ -534,7 +534,7 @@ Automate browser interactions for web scraping, testing, and interaction with we ```python -tool = BrowserAutomationAgentTool( +tool = BrowserAutomationPreviewTool( browser_automation_preview=BrowserAutomationToolParameters( connection=BrowserAutomationToolConnectionParameters( project_connection_id=os.environ["BROWSER_AUTOMATION_PROJECT_CONNECTION_ID"], @@ -574,7 +574,7 @@ Enable multi-agent collaboration where agents can communicate and delegate tasks ```python -tool = A2ATool( +tool = A2APreviewTool( project_connection_id=os.environ["A2A_PROJECT_CONNECTION_ID"], ) # If the connection is missing target, we need to set the A2A endpoint URL. 
@@ -596,7 +596,7 @@ Call external APIs defined by OpenAPI specifications using project connection au with open(tripadvisor_asset_file_path, "r") as f: openapi_tripadvisor = jsonref.loads(f.read()) -tool = OpenApiAgentTool( +tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="tripadvisor", spec=openapi_tripadvisor, @@ -1128,7 +1128,15 @@ Operation returned an invalid status 'Unauthorized' ### Logging -The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following at the top of your Python script: +The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The logs include HTTP request and response headers and body, which are often useful when troubleshooting or reporting an issue to Microsoft. + +#### Default console logging + +To turn on client console logging define the environment variable `AZURE_AI_PROJECTS_CONSOLE_LOGGING=true` before running your Python script. Note that the log is not redacted and contains sensitive information such as your authentication token. Be sure to remove any sensitive information before sharing this log. + +#### Customizing your log + +Instead of using the above-mentioned environment variable, you can configure logging yourself and control the log level, format and destination. 
To log to `stdout`, add the following at the top of your Python script: ```python import sys diff --git a/sdk/ai/azure-ai-projects/_tsp-location.yaml b/sdk/ai/azure-ai-projects/_tsp-location.yaml index 80b8dc2ae282..00631b52071a 100644 --- a/sdk/ai/azure-ai-projects/_tsp-location.yaml +++ b/sdk/ai/azure-ai-projects/_tsp-location.yaml @@ -1,4 +1,4 @@ -directory: specification/ai/Azure.AI.Projects -commit: 78bfd335c31b8764578cfb9840f3b74349c10354 -repo: Azure/azure-rest-api-specs-pr +directory: specification/ai/Foundry +commit: 51415f0131fc0e65f88e8a75b4b729a528e8e959 +repo: Azure/azure-rest-api-specs additionalDirectories: diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json index c0d3cbfc4e93..bfc2a677bb06 100644 --- a/sdk/ai/azure-ai-projects/apiview-properties.json +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -2,7 +2,7 @@ "CrossLanguagePackageId": "Azure.AI.Projects", "CrossLanguageDefinitionId": { "azure.ai.projects.models.Tool": "OpenAI.Tool", - "azure.ai.projects.models.A2ATool": "Azure.AI.Projects.A2ATool", + "azure.ai.projects.models.A2APreviewTool": "Azure.AI.Projects.A2APreviewTool", "azure.ai.projects.models.InsightResult": "Azure.AI.Projects.InsightResult", "azure.ai.projects.models.AgentClusterInsightResult": "Azure.AI.Projects.AgentClusterInsightResult", "azure.ai.projects.models.InsightRequest": "Azure.AI.Projects.InsightRequest", @@ -16,87 +16,78 @@ "azure.ai.projects.models.AgentReference": "Azure.AI.Projects.AgentReference", "azure.ai.projects.models.EvaluationTaxonomyInput": "Azure.AI.Projects.EvaluationTaxonomyInput", "azure.ai.projects.models.AgentTaxonomyInput": "Azure.AI.Projects.AgentTaxonomyInput", - "azure.ai.projects.models.AgentVersionDetails": "Azure.AI.Projects.AgentVersionDetails", + "azure.ai.projects.models.AgentVersionDetails": "Azure.AI.Projects.AgentVersionObject", "azure.ai.projects.models.AISearchIndexResource": "Azure.AI.Projects.AISearchIndexResource", 
"azure.ai.projects.models.Annotation": "OpenAI.Annotation", - "azure.ai.projects.models.AnnotationFileCitation": "OpenAI.AnnotationFileCitation", - "azure.ai.projects.models.AnnotationFilePath": "OpenAI.AnnotationFilePath", - "azure.ai.projects.models.AnnotationUrlCitation": "OpenAI.AnnotationUrlCitation", "azure.ai.projects.models.ApiErrorResponse": "Azure.AI.Projects.ApiErrorResponse", "azure.ai.projects.models.ApiKeyCredentials": "Azure.AI.Projects.ApiKeyCredentials", - "azure.ai.projects.models.Location": "OpenAI.Location", + "azure.ai.projects.models.ApplyPatchFileOperation": "OpenAI.ApplyPatchFileOperation", + "azure.ai.projects.models.ApplyPatchCreateFileOperation": "OpenAI.ApplyPatchCreateFileOperation", + "azure.ai.projects.models.ApplyPatchOperationParam": "OpenAI.ApplyPatchOperationParam", + "azure.ai.projects.models.ApplyPatchCreateFileOperationParam": "OpenAI.ApplyPatchCreateFileOperationParam", + "azure.ai.projects.models.ApplyPatchDeleteFileOperation": "OpenAI.ApplyPatchDeleteFileOperation", + "azure.ai.projects.models.ApplyPatchDeleteFileOperationParam": "OpenAI.ApplyPatchDeleteFileOperationParam", + "azure.ai.projects.models.ApplyPatchToolParam": "OpenAI.ApplyPatchToolParam", + "azure.ai.projects.models.ApplyPatchUpdateFileOperation": "OpenAI.ApplyPatchUpdateFileOperation", + "azure.ai.projects.models.ApplyPatchUpdateFileOperationParam": "OpenAI.ApplyPatchUpdateFileOperationParam", "azure.ai.projects.models.ApproximateLocation": "OpenAI.ApproximateLocation", "azure.ai.projects.models.Target": "Azure.AI.Projects.Target", "azure.ai.projects.models.AzureAIAgentTarget": "Azure.AI.Projects.AzureAIAgentTarget", - "azure.ai.projects.models.AzureAISearchAgentTool": "Azure.AI.Projects.AzureAISearchAgentTool", "azure.ai.projects.models.Index": "Azure.AI.Projects.Index", "azure.ai.projects.models.AzureAISearchIndex": "Azure.AI.Projects.AzureAISearchIndex", + "azure.ai.projects.models.AzureAISearchTool": "Azure.AI.Projects.AzureAISearchTool", 
"azure.ai.projects.models.AzureAISearchToolResource": "Azure.AI.Projects.AzureAISearchToolResource", - "azure.ai.projects.models.AzureFunctionAgentTool": "Azure.AI.Projects.AzureFunctionAgentTool", "azure.ai.projects.models.AzureFunctionBinding": "Azure.AI.Projects.AzureFunctionBinding", "azure.ai.projects.models.AzureFunctionDefinition": "Azure.AI.Projects.AzureFunctionDefinition", "azure.ai.projects.models.AzureFunctionDefinitionFunction": "Azure.AI.Projects.AzureFunctionDefinition.function.anonymous", "azure.ai.projects.models.AzureFunctionStorageQueue": "Azure.AI.Projects.AzureFunctionStorageQueue", + "azure.ai.projects.models.AzureFunctionTool": "Azure.AI.Projects.AzureFunctionTool", "azure.ai.projects.models.TargetConfig": "Azure.AI.Projects.TargetConfig", "azure.ai.projects.models.AzureOpenAIModelConfiguration": "Azure.AI.Projects.AzureOpenAIModelConfiguration", - "azure.ai.projects.models.BingCustomSearchAgentTool": "Azure.AI.Projects.BingCustomSearchAgentTool", "azure.ai.projects.models.BingCustomSearchConfiguration": "Azure.AI.Projects.BingCustomSearchConfiguration", + "azure.ai.projects.models.BingCustomSearchPreviewTool": "Azure.AI.Projects.BingCustomSearchPreviewTool", "azure.ai.projects.models.BingCustomSearchToolParameters": "Azure.AI.Projects.BingCustomSearchToolParameters", - "azure.ai.projects.models.BingGroundingAgentTool": "Azure.AI.Projects.BingGroundingAgentTool", "azure.ai.projects.models.BingGroundingSearchConfiguration": "Azure.AI.Projects.BingGroundingSearchConfiguration", "azure.ai.projects.models.BingGroundingSearchToolParameters": "Azure.AI.Projects.BingGroundingSearchToolParameters", + "azure.ai.projects.models.BingGroundingTool": "Azure.AI.Projects.BingGroundingTool", "azure.ai.projects.models.BlobReference": "Azure.AI.Projects.BlobReference", "azure.ai.projects.models.BlobReferenceSasCredential": "Azure.AI.Projects.SasCredential", - "azure.ai.projects.models.BrowserAutomationAgentTool": "Azure.AI.Projects.BrowserAutomationAgentTool", 
+ "azure.ai.projects.models.BrowserAutomationPreviewTool": "Azure.AI.Projects.BrowserAutomationPreviewTool", "azure.ai.projects.models.BrowserAutomationToolConnectionParameters": "Azure.AI.Projects.BrowserAutomationToolConnectionParameters", "azure.ai.projects.models.BrowserAutomationToolParameters": "Azure.AI.Projects.BrowserAutomationToolParameters", "azure.ai.projects.models.CaptureStructuredOutputsTool": "Azure.AI.Projects.CaptureStructuredOutputsTool", "azure.ai.projects.models.ChartCoordinate": "Azure.AI.Projects.ChartCoordinate", "azure.ai.projects.models.MemoryItem": "Azure.AI.Projects.MemoryItem", "azure.ai.projects.models.ChatSummaryMemoryItem": "Azure.AI.Projects.ChatSummaryMemoryItem", + "azure.ai.projects.models.ComputerAction": "OpenAI.ComputerAction", + "azure.ai.projects.models.ClickParam": "OpenAI.ClickParam", "azure.ai.projects.models.ClusterInsightResult": "Azure.AI.Projects.ClusterInsightResult", "azure.ai.projects.models.ClusterTokenUsage": "Azure.AI.Projects.ClusterTokenUsage", "azure.ai.projects.models.EvaluatorDefinition": "Azure.AI.Projects.EvaluatorDefinition", "azure.ai.projects.models.CodeBasedEvaluatorDefinition": "Azure.AI.Projects.CodeBasedEvaluatorDefinition", - "azure.ai.projects.models.CodeInterpreterOutput": "OpenAI.CodeInterpreterOutput", + "azure.ai.projects.models.CodeInterpreterContainerAuto": "OpenAI.CodeInterpreterContainerAuto", "azure.ai.projects.models.CodeInterpreterOutputImage": "OpenAI.CodeInterpreterOutputImage", "azure.ai.projects.models.CodeInterpreterOutputLogs": "OpenAI.CodeInterpreterOutputLogs", "azure.ai.projects.models.CodeInterpreterTool": "OpenAI.CodeInterpreterTool", - "azure.ai.projects.models.CodeInterpreterToolAuto": "OpenAI.CodeInterpreterToolAuto", - "azure.ai.projects.models.ItemParam": "OpenAI.ItemParam", - "azure.ai.projects.models.CodeInterpreterToolCallItemParam": "OpenAI.CodeInterpreterToolCallItemParam", - "azure.ai.projects.models.ItemResource": "OpenAI.ItemResource", - 
"azure.ai.projects.models.CodeInterpreterToolCallItemResource": "OpenAI.CodeInterpreterToolCallItemResource", "azure.ai.projects.models.ComparisonFilter": "OpenAI.ComparisonFilter", "azure.ai.projects.models.CompoundFilter": "OpenAI.CompoundFilter", - "azure.ai.projects.models.ComputerAction": "OpenAI.ComputerAction", - "azure.ai.projects.models.ComputerActionClick": "OpenAI.ComputerActionClick", - "azure.ai.projects.models.ComputerActionDoubleClick": "OpenAI.ComputerActionDoubleClick", - "azure.ai.projects.models.ComputerActionDrag": "OpenAI.ComputerActionDrag", - "azure.ai.projects.models.ComputerActionKeyPress": "OpenAI.ComputerActionKeyPress", - "azure.ai.projects.models.ComputerActionMove": "OpenAI.ComputerActionMove", - "azure.ai.projects.models.ComputerActionScreenshot": "OpenAI.ComputerActionScreenshot", - "azure.ai.projects.models.ComputerActionScroll": "OpenAI.ComputerActionScroll", - "azure.ai.projects.models.ComputerActionTypeKeys": "OpenAI.ComputerActionTypeKeys", - "azure.ai.projects.models.ComputerActionWait": "OpenAI.ComputerActionWait", - "azure.ai.projects.models.ComputerToolCallItemParam": "OpenAI.ComputerToolCallItemParam", - "azure.ai.projects.models.ComputerToolCallItemResource": "OpenAI.ComputerToolCallItemResource", - "azure.ai.projects.models.ComputerToolCallOutputItemOutput": "OpenAI.ComputerToolCallOutputItemOutput", - "azure.ai.projects.models.ComputerToolCallOutputItemOutputComputerScreenshot": "OpenAI.ComputerToolCallOutputItemOutputComputerScreenshot", - "azure.ai.projects.models.ComputerToolCallOutputItemParam": "OpenAI.ComputerToolCallOutputItemParam", - "azure.ai.projects.models.ComputerToolCallOutputItemResource": "OpenAI.ComputerToolCallOutputItemResource", - "azure.ai.projects.models.ComputerToolCallSafetyCheck": "OpenAI.ComputerToolCallSafetyCheck", + "azure.ai.projects.models.ComputerCallSafetyCheckParam": "OpenAI.ComputerCallSafetyCheckParam", + "azure.ai.projects.models.ComputerScreenshotImage": 
"OpenAI.ComputerScreenshotImage", "azure.ai.projects.models.ComputerUsePreviewTool": "OpenAI.ComputerUsePreviewTool", "azure.ai.projects.models.Connection": "Azure.AI.Projects.Connection", "azure.ai.projects.models.ContainerAppAgentDefinition": "Azure.AI.Projects.ContainerAppAgentDefinition", + "azure.ai.projects.models.ContainerFileCitationBody": "OpenAI.ContainerFileCitationBody", "azure.ai.projects.models.EvaluationRuleAction": "Azure.AI.Projects.EvaluationRuleAction", "azure.ai.projects.models.ContinuousEvaluationRuleAction": "Azure.AI.Projects.ContinuousEvaluationRuleAction", - "azure.ai.projects.models.Coordinate": "OpenAI.Coordinate", "azure.ai.projects.models.CosmosDBIndex": "Azure.AI.Projects.CosmosDBIndex", "azure.ai.projects.models.CreatedBy": "Azure.AI.Projects.CreatedBy", "azure.ai.projects.models.Trigger": "Azure.AI.Projects.Trigger", "azure.ai.projects.models.CronTrigger": "Azure.AI.Projects.CronTrigger", "azure.ai.projects.models.CustomCredential": "Azure.AI.Projects.CustomCredential", + "azure.ai.projects.models.CustomToolParamFormat": "OpenAI.CustomToolParamFormat", + "azure.ai.projects.models.CustomGrammarFormatParam": "OpenAI.CustomGrammarFormatParam", + "azure.ai.projects.models.CustomTextFormatParam": "OpenAI.CustomTextFormatParam", + "azure.ai.projects.models.CustomToolParam": "OpenAI.CustomToolParam", "azure.ai.projects.models.RecurrenceSchedule": "Azure.AI.Projects.RecurrenceSchedule", "azure.ai.projects.models.DailyRecurrenceSchedule": "Azure.AI.Projects.DailyRecurrenceSchedule", "azure.ai.projects.models.DatasetCredential": "Azure.AI.Projects.AssetCredentialResponse", @@ -105,6 +96,11 @@ "azure.ai.projects.models.DeleteAgentVersionResponse": "Azure.AI.Projects.DeleteAgentVersionResponse", "azure.ai.projects.models.DeleteMemoryStoreResult": "Azure.AI.Projects.DeleteMemoryStoreResponse", "azure.ai.projects.models.Deployment": "Azure.AI.Projects.Deployment", + "azure.ai.projects.models.DoubleClickAction": "OpenAI.DoubleClickAction", + 
"azure.ai.projects.models.Drag": "OpenAI.Drag", + "azure.ai.projects.models.DragPoint": "OpenAI.DragPoint", + "azure.ai.projects.models.InputItem": "OpenAI.InputItem", + "azure.ai.projects.models.EasyInputMessage": "OpenAI.EasyInputMessage", "azure.ai.projects.models.EmbeddingConfiguration": "Azure.AI.Projects.EmbeddingConfiguration", "azure.ai.projects.models.EntraIDCredentials": "Azure.AI.Projects.EntraIDCredentials", "azure.ai.projects.models.Error": "OpenAI.Error", @@ -127,67 +123,108 @@ "azure.ai.projects.models.EvaluatorVersion": "Azure.AI.Projects.EvaluatorVersion", "azure.ai.projects.models.FabricDataAgentToolParameters": "Azure.AI.Projects.FabricDataAgentToolParameters", "azure.ai.projects.models.FieldMapping": "Azure.AI.Projects.FieldMapping", + "azure.ai.projects.models.FileCitationBody": "OpenAI.FileCitationBody", "azure.ai.projects.models.FileDatasetVersion": "Azure.AI.Projects.FileDatasetVersion", + "azure.ai.projects.models.FilePath": "OpenAI.FilePath", "azure.ai.projects.models.FileSearchTool": "OpenAI.FileSearchTool", - "azure.ai.projects.models.FileSearchToolCallItemParam": "OpenAI.FileSearchToolCallItemParam", - "azure.ai.projects.models.FileSearchToolCallItemParamResult": "OpenAI.FileSearchToolCallItemParam.result.anonymous", - "azure.ai.projects.models.FileSearchToolCallItemResource": "OpenAI.FileSearchToolCallItemResource", + "azure.ai.projects.models.FileSearchToolCallResults": "OpenAI.FileSearchToolCallResults", "azure.ai.projects.models.FolderDatasetVersion": "Azure.AI.Projects.FolderDatasetVersion", + "azure.ai.projects.models.FunctionAndCustomToolCallOutput": "OpenAI.FunctionAndCustomToolCallOutput", + "azure.ai.projects.models.FunctionAndCustomToolCallOutputInputFileContent": "OpenAI.FunctionAndCustomToolCallOutputInputFileContent", + "azure.ai.projects.models.FunctionAndCustomToolCallOutputInputImageContent": "OpenAI.FunctionAndCustomToolCallOutputInputImageContent", + 
"azure.ai.projects.models.FunctionAndCustomToolCallOutputInputTextContent": "OpenAI.FunctionAndCustomToolCallOutputInputTextContent", + "azure.ai.projects.models.FunctionShellAction": "OpenAI.FunctionShellAction", + "azure.ai.projects.models.FunctionShellActionParam": "OpenAI.FunctionShellActionParam", + "azure.ai.projects.models.FunctionShellCallOutputContent": "OpenAI.FunctionShellCallOutputContent", + "azure.ai.projects.models.FunctionShellCallOutputContentParam": "OpenAI.FunctionShellCallOutputContentParam", + "azure.ai.projects.models.FunctionShellCallOutputOutcome": "OpenAI.FunctionShellCallOutputOutcome", + "azure.ai.projects.models.FunctionShellCallOutputExitOutcome": "OpenAI.FunctionShellCallOutputExitOutcome", + "azure.ai.projects.models.FunctionShellCallOutputOutcomeParam": "OpenAI.FunctionShellCallOutputOutcomeParam", + "azure.ai.projects.models.FunctionShellCallOutputExitOutcomeParam": "OpenAI.FunctionShellCallOutputExitOutcomeParam", + "azure.ai.projects.models.FunctionShellCallOutputTimeoutOutcome": "OpenAI.FunctionShellCallOutputTimeoutOutcome", + "azure.ai.projects.models.FunctionShellCallOutputTimeoutOutcomeParam": "OpenAI.FunctionShellCallOutputTimeoutOutcomeParam", + "azure.ai.projects.models.FunctionShellToolParam": "OpenAI.FunctionShellToolParam", "azure.ai.projects.models.FunctionTool": "OpenAI.FunctionTool", - "azure.ai.projects.models.FunctionToolCallItemParam": "OpenAI.FunctionToolCallItemParam", - "azure.ai.projects.models.FunctionToolCallItemResource": "OpenAI.FunctionToolCallItemResource", - "azure.ai.projects.models.FunctionToolCallOutputItemParam": "OpenAI.FunctionToolCallOutputItemParam", - "azure.ai.projects.models.FunctionToolCallOutputItemResource": "OpenAI.FunctionToolCallOutputItemResource", "azure.ai.projects.models.HostedAgentDefinition": "Azure.AI.Projects.HostedAgentDefinition", "azure.ai.projects.models.HourlyRecurrenceSchedule": "Azure.AI.Projects.HourlyRecurrenceSchedule", 
"azure.ai.projects.models.HumanEvaluationRuleAction": "Azure.AI.Projects.HumanEvaluationRuleAction", + "azure.ai.projects.models.HybridSearchOptions": "OpenAI.HybridSearchOptions", "azure.ai.projects.models.ImageBasedHostedAgentDefinition": "Azure.AI.Projects.ImageBasedHostedAgentDefinition", "azure.ai.projects.models.ImageGenTool": "OpenAI.ImageGenTool", - "azure.ai.projects.models.ImageGenToolCallItemParam": "OpenAI.ImageGenToolCallItemParam", - "azure.ai.projects.models.ImageGenToolCallItemResource": "OpenAI.ImageGenToolCallItemResource", - "azure.ai.projects.models.ImageGenToolInputImageMask": "OpenAI.ImageGenTool.input_image_mask.anonymous", + "azure.ai.projects.models.ImageGenToolInputImageMask": "OpenAI.ImageGenToolInputImageMask", + "azure.ai.projects.models.InputContent": "OpenAI.InputContent", + "azure.ai.projects.models.InputContentInputFileContent": "OpenAI.InputContentInputFileContent", + "azure.ai.projects.models.InputContentInputImageContent": "OpenAI.InputContentInputImageContent", + "azure.ai.projects.models.InputContentInputTextContent": "OpenAI.InputContentInputTextContent", + "azure.ai.projects.models.InputFileContentParam": "OpenAI.InputFileContentParam", + "azure.ai.projects.models.InputImageContentParamAutoParam": "OpenAI.InputImageContentParamAutoParam", + "azure.ai.projects.models.InputItemApplyPatchToolCallItemParam": "OpenAI.InputItemApplyPatchToolCallItemParam", + "azure.ai.projects.models.InputItemApplyPatchToolCallOutputItemParam": "OpenAI.InputItemApplyPatchToolCallOutputItemParam", + "azure.ai.projects.models.InputItemCodeInterpreterToolCall": "OpenAI.InputItemCodeInterpreterToolCall", + "azure.ai.projects.models.InputItemCompactionSummaryItemParam": "OpenAI.InputItemCompactionSummaryItemParam", + "azure.ai.projects.models.InputItemComputerCallOutputItemParam": "OpenAI.InputItemComputerCallOutputItemParam", + "azure.ai.projects.models.InputItemComputerToolCall": "OpenAI.InputItemComputerToolCall", + 
"azure.ai.projects.models.InputItemCustomToolCall": "OpenAI.InputItemCustomToolCall", + "azure.ai.projects.models.InputItemCustomToolCallOutput": "OpenAI.InputItemCustomToolCallOutput", + "azure.ai.projects.models.InputItemFileSearchToolCall": "OpenAI.InputItemFileSearchToolCall", + "azure.ai.projects.models.InputItemFunctionCallOutputItemParam": "OpenAI.InputItemFunctionCallOutputItemParam", + "azure.ai.projects.models.InputItemFunctionShellCallItemParam": "OpenAI.InputItemFunctionShellCallItemParam", + "azure.ai.projects.models.InputItemFunctionShellCallOutputItemParam": "OpenAI.InputItemFunctionShellCallOutputItemParam", + "azure.ai.projects.models.InputItemFunctionToolCall": "OpenAI.InputItemFunctionToolCall", + "azure.ai.projects.models.InputItemImageGenToolCall": "OpenAI.InputItemImageGenToolCall", + "azure.ai.projects.models.InputItemLocalShellToolCall": "OpenAI.InputItemLocalShellToolCall", + "azure.ai.projects.models.InputItemLocalShellToolCallOutput": "OpenAI.InputItemLocalShellToolCallOutput", + "azure.ai.projects.models.InputItemMcpApprovalRequest": "OpenAI.InputItemMcpApprovalRequest", + "azure.ai.projects.models.InputItemMcpApprovalResponse": "OpenAI.InputItemMcpApprovalResponse", + "azure.ai.projects.models.InputItemMcpListTools": "OpenAI.InputItemMcpListTools", + "azure.ai.projects.models.InputItemMcpToolCall": "OpenAI.InputItemMcpToolCall", + "azure.ai.projects.models.InputItemOutputMessage": "OpenAI.InputItemOutputMessage", + "azure.ai.projects.models.InputItemReasoningItem": "OpenAI.InputItemReasoningItem", + "azure.ai.projects.models.InputItemWebSearchToolCall": "OpenAI.InputItemWebSearchToolCall", + "azure.ai.projects.models.ItemResource": "OpenAI.ItemResource", + "azure.ai.projects.models.InputMessageResource": "OpenAI.InputMessageResource", + "azure.ai.projects.models.InputTextContentParam": "OpenAI.InputTextContentParam", "azure.ai.projects.models.Insight": "Azure.AI.Projects.Insight", "azure.ai.projects.models.InsightCluster": 
"Azure.AI.Projects.InsightCluster", "azure.ai.projects.models.InsightModelConfiguration": "Azure.AI.Projects.InsightModelConfiguration", "azure.ai.projects.models.InsightScheduleTask": "Azure.AI.Projects.InsightScheduleTask", "azure.ai.projects.models.InsightsMetadata": "Azure.AI.Projects.InsightsMetadata", "azure.ai.projects.models.InsightSummary": "Azure.AI.Projects.InsightSummary", - "azure.ai.projects.models.ItemContent": "OpenAI.ItemContent", - "azure.ai.projects.models.ItemContentInputAudio": "OpenAI.ItemContentInputAudio", - "azure.ai.projects.models.ItemContentInputFile": "OpenAI.ItemContentInputFile", - "azure.ai.projects.models.ItemContentInputImage": "OpenAI.ItemContentInputImage", - "azure.ai.projects.models.ItemContentInputText": "OpenAI.ItemContentInputText", - "azure.ai.projects.models.ItemContentOutputAudio": "OpenAI.ItemContentOutputAudio", - "azure.ai.projects.models.ItemContentOutputText": "OpenAI.ItemContentOutputText", - "azure.ai.projects.models.ItemContentRefusal": "OpenAI.ItemContentRefusal", - "azure.ai.projects.models.ItemReferenceItemParam": "OpenAI.ItemReferenceItemParam", + "azure.ai.projects.models.ItemReferenceParam": "OpenAI.ItemReferenceParam", + "azure.ai.projects.models.ItemResourceApplyPatchToolCall": "OpenAI.ItemResourceApplyPatchToolCall", + "azure.ai.projects.models.ItemResourceApplyPatchToolCallOutput": "OpenAI.ItemResourceApplyPatchToolCallOutput", + "azure.ai.projects.models.ItemResourceCodeInterpreterToolCall": "OpenAI.ItemResourceCodeInterpreterToolCall", + "azure.ai.projects.models.ItemResourceComputerToolCall": "OpenAI.ItemResourceComputerToolCall", + "azure.ai.projects.models.ItemResourceComputerToolCallOutputResource": "OpenAI.ItemResourceComputerToolCallOutputResource", + "azure.ai.projects.models.ItemResourceFileSearchToolCall": "OpenAI.ItemResourceFileSearchToolCall", + "azure.ai.projects.models.ItemResourceFunctionShellCall": "OpenAI.ItemResourceFunctionShellCall", + 
"azure.ai.projects.models.ItemResourceFunctionShellCallOutput": "OpenAI.ItemResourceFunctionShellCallOutput", + "azure.ai.projects.models.ItemResourceFunctionToolCallOutputResource": "OpenAI.ItemResourceFunctionToolCallOutputResource", + "azure.ai.projects.models.ItemResourceFunctionToolCallResource": "OpenAI.ItemResourceFunctionToolCallResource", + "azure.ai.projects.models.ItemResourceImageGenToolCall": "OpenAI.ItemResourceImageGenToolCall", + "azure.ai.projects.models.ItemResourceLocalShellToolCall": "OpenAI.ItemResourceLocalShellToolCall", + "azure.ai.projects.models.ItemResourceLocalShellToolCallOutput": "OpenAI.ItemResourceLocalShellToolCallOutput", + "azure.ai.projects.models.ItemResourceMcpApprovalRequest": "OpenAI.ItemResourceMcpApprovalRequest", + "azure.ai.projects.models.ItemResourceMcpApprovalResponseResource": "OpenAI.ItemResourceMcpApprovalResponseResource", + "azure.ai.projects.models.ItemResourceMcpListTools": "OpenAI.ItemResourceMcpListTools", + "azure.ai.projects.models.ItemResourceMcpToolCall": "OpenAI.ItemResourceMcpToolCall", + "azure.ai.projects.models.ItemResourceOutputMessage": "OpenAI.ItemResourceOutputMessage", + "azure.ai.projects.models.ItemResourceWebSearchToolCall": "OpenAI.ItemResourceWebSearchToolCall", + "azure.ai.projects.models.KeyPressAction": "OpenAI.KeyPressAction", "azure.ai.projects.models.LocalShellExecAction": "OpenAI.LocalShellExecAction", - "azure.ai.projects.models.LocalShellTool": "OpenAI.LocalShellTool", - "azure.ai.projects.models.LocalShellToolCallItemParam": "OpenAI.LocalShellToolCallItemParam", - "azure.ai.projects.models.LocalShellToolCallItemResource": "OpenAI.LocalShellToolCallItemResource", - "azure.ai.projects.models.LocalShellToolCallOutputItemParam": "OpenAI.LocalShellToolCallOutputItemParam", - "azure.ai.projects.models.LocalShellToolCallOutputItemResource": "OpenAI.LocalShellToolCallOutputItemResource", + "azure.ai.projects.models.LocalShellToolParam": "OpenAI.LocalShellToolParam", 
"azure.ai.projects.models.LogProb": "OpenAI.LogProb", "azure.ai.projects.models.ManagedAzureAISearchIndex": "Azure.AI.Projects.ManagedAzureAISearchIndex", - "azure.ai.projects.models.MCPApprovalRequestItemParam": "OpenAI.MCPApprovalRequestItemParam", - "azure.ai.projects.models.MCPApprovalRequestItemResource": "OpenAI.MCPApprovalRequestItemResource", - "azure.ai.projects.models.MCPApprovalResponseItemParam": "OpenAI.MCPApprovalResponseItemParam", - "azure.ai.projects.models.MCPApprovalResponseItemResource": "OpenAI.MCPApprovalResponseItemResource", - "azure.ai.projects.models.MCPCallItemParam": "OpenAI.MCPCallItemParam", - "azure.ai.projects.models.MCPCallItemResource": "OpenAI.MCPCallItemResource", - "azure.ai.projects.models.MCPListToolsItemParam": "OpenAI.MCPListToolsItemParam", - "azure.ai.projects.models.MCPListToolsItemResource": "OpenAI.MCPListToolsItemResource", "azure.ai.projects.models.MCPListToolsTool": "OpenAI.MCPListToolsTool", + "azure.ai.projects.models.MCPListToolsToolAnnotations": "OpenAI.MCPListToolsToolAnnotations", + "azure.ai.projects.models.MCPListToolsToolInputSchema": "OpenAI.MCPListToolsToolInputSchema", "azure.ai.projects.models.MCPTool": "OpenAI.MCPTool", - "azure.ai.projects.models.MCPToolAllowedTools1": "OpenAI.MCPTool.allowed_tools.anonymous", - "azure.ai.projects.models.MCPToolRequireApproval1": "OpenAI.MCPTool.require_approval.anonymous", - "azure.ai.projects.models.MCPToolRequireApprovalAlways": "OpenAI.MCPTool.require_approval.always.anonymous", - "azure.ai.projects.models.MCPToolRequireApprovalNever": "OpenAI.MCPTool.require_approval.never.anonymous", + "azure.ai.projects.models.MCPToolFilter": "OpenAI.MCPToolFilter", + "azure.ai.projects.models.MCPToolRequireApproval": "OpenAI.MCPToolRequireApproval", "azure.ai.projects.models.MemoryOperation": "Azure.AI.Projects.MemoryOperation", "azure.ai.projects.models.MemorySearchItem": "Azure.AI.Projects.MemorySearchItem", "azure.ai.projects.models.MemorySearchOptions": 
"Azure.AI.Projects.MemorySearchOptions", - "azure.ai.projects.models.MemorySearchTool": "Azure.AI.Projects.MemorySearchTool", - "azure.ai.projects.models.MemorySearchToolCallItemParam": "Azure.AI.Projects.MemorySearchToolCallItemParam", + "azure.ai.projects.models.MemorySearchPreviewTool": "Azure.AI.Projects.MemorySearchPreviewTool", "azure.ai.projects.models.MemorySearchToolCallItemResource": "Azure.AI.Projects.MemorySearchToolCallItemResource", "azure.ai.projects.models.MemoryStoreDefinition": "Azure.AI.Projects.MemoryStoreDefinition", "azure.ai.projects.models.MemoryStoreDefaultDefinition": "Azure.AI.Projects.MemoryStoreDefaultDefinition", @@ -195,19 +232,17 @@ "azure.ai.projects.models.MemoryStoreDeleteScopeResult": "Azure.AI.Projects.MemoryStoreDeleteScopeResponse", "azure.ai.projects.models.MemoryStoreDetails": "Azure.AI.Projects.MemoryStoreObject", "azure.ai.projects.models.MemoryStoreOperationUsage": "Azure.AI.Projects.MemoryStoreOperationUsage", - "azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails": "Azure.AI.Projects.MemoryStoreOperationUsage.input_tokens_details.anonymous", - "azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails": "Azure.AI.Projects.MemoryStoreOperationUsage.output_tokens_details.anonymous", "azure.ai.projects.models.MemoryStoreSearchResult": "Azure.AI.Projects.MemoryStoreSearchResponse", "azure.ai.projects.models.MemoryStoreUpdateCompletedResult": "Azure.AI.Projects.MemoryStoreUpdateCompletedResult", "azure.ai.projects.models.MemoryStoreUpdateResult": "Azure.AI.Projects.MemoryStoreUpdateResponse", - "azure.ai.projects.models.MicrosoftFabricAgentTool": "Azure.AI.Projects.MicrosoftFabricAgentTool", + "azure.ai.projects.models.MicrosoftFabricPreviewTool": "Azure.AI.Projects.MicrosoftFabricPreviewTool", "azure.ai.projects.models.ModelDeployment": "Azure.AI.Projects.ModelDeployment", "azure.ai.projects.models.ModelDeploymentSku": "Azure.AI.Projects.Sku", 
"azure.ai.projects.models.MonthlyRecurrenceSchedule": "Azure.AI.Projects.MonthlyRecurrenceSchedule", + "azure.ai.projects.models.Move": "OpenAI.Move", "azure.ai.projects.models.NoAuthenticationCredentials": "Azure.AI.Projects.NoAuthenticationCredentials", "azure.ai.projects.models.OAuthConsentRequestItemResource": "Azure.AI.Projects.OAuthConsentRequestItemResource", "azure.ai.projects.models.OneTimeTrigger": "Azure.AI.Projects.OneTimeTrigger", - "azure.ai.projects.models.OpenApiAgentTool": "Azure.AI.Projects.OpenApiAgentTool", "azure.ai.projects.models.OpenApiAuthDetails": "Azure.AI.Projects.OpenApiAuthDetails", "azure.ai.projects.models.OpenApiAnonymousAuthDetails": "Azure.AI.Projects.OpenApiAnonymousAuthDetails", "azure.ai.projects.models.OpenApiFunctionDefinition": "Azure.AI.Projects.OpenApiFunctionDefinition", @@ -216,9 +251,13 @@ "azure.ai.projects.models.OpenApiManagedSecurityScheme": "Azure.AI.Projects.OpenApiManagedSecurityScheme", "azure.ai.projects.models.OpenApiProjectConnectionAuthDetails": "Azure.AI.Projects.OpenApiProjectConnectionAuthDetails", "azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme": "Azure.AI.Projects.OpenApiProjectConnectionSecurityScheme", + "azure.ai.projects.models.OpenApiTool": "Azure.AI.Projects.OpenApiTool", + "azure.ai.projects.models.OutputContent": "OpenAI.OutputContent", + "azure.ai.projects.models.OutputMessageContent": "OpenAI.OutputMessageContent", + "azure.ai.projects.models.OutputMessageContentOutputTextContent": "OpenAI.OutputMessageContentOutputTextContent", + "azure.ai.projects.models.OutputMessageContentRefusalContent": "OpenAI.OutputMessageContentRefusalContent", "azure.ai.projects.models.PendingUploadRequest": "Azure.AI.Projects.PendingUploadRequest", "azure.ai.projects.models.PendingUploadResponse": "Azure.AI.Projects.PendingUploadResponse", - "azure.ai.projects.models.Prompt": "OpenAI.Prompt", "azure.ai.projects.models.PromptAgentDefinition": "Azure.AI.Projects.PromptAgentDefinition", 
"azure.ai.projects.models.PromptAgentDefinitionText": "Azure.AI.Projects.PromptAgentDefinition.text.anonymous", "azure.ai.projects.models.PromptBasedEvaluatorDefinition": "Azure.AI.Projects.PromptBasedEvaluatorDefinition", @@ -226,113 +265,45 @@ "azure.ai.projects.models.RaiConfig": "Azure.AI.Projects.RaiConfig", "azure.ai.projects.models.RankingOptions": "OpenAI.RankingOptions", "azure.ai.projects.models.Reasoning": "OpenAI.Reasoning", - "azure.ai.projects.models.ReasoningItemParam": "OpenAI.ReasoningItemParam", - "azure.ai.projects.models.ReasoningItemResource": "OpenAI.ReasoningItemResource", - "azure.ai.projects.models.ReasoningItemSummaryPart": "OpenAI.ReasoningItemSummaryPart", - "azure.ai.projects.models.ReasoningItemSummaryTextPart": "OpenAI.ReasoningItemSummaryTextPart", + "azure.ai.projects.models.ReasoningTextContent": "OpenAI.ReasoningTextContent", "azure.ai.projects.models.RecurrenceTrigger": "Azure.AI.Projects.RecurrenceTrigger", "azure.ai.projects.models.RedTeam": "Azure.AI.Projects.RedTeam", - "azure.ai.projects.models.Response": "OpenAI.Response", - "azure.ai.projects.models.ResponseStreamEvent": "OpenAI.ResponseStreamEvent", - "azure.ai.projects.models.ResponseCodeInterpreterCallCodeDeltaEvent": "OpenAI.ResponseCodeInterpreterCallCodeDeltaEvent", - "azure.ai.projects.models.ResponseCodeInterpreterCallCodeDoneEvent": "OpenAI.ResponseCodeInterpreterCallCodeDoneEvent", - "azure.ai.projects.models.ResponseCodeInterpreterCallCompletedEvent": "OpenAI.ResponseCodeInterpreterCallCompletedEvent", - "azure.ai.projects.models.ResponseCodeInterpreterCallInProgressEvent": "OpenAI.ResponseCodeInterpreterCallInProgressEvent", - "azure.ai.projects.models.ResponseCodeInterpreterCallInterpretingEvent": "OpenAI.ResponseCodeInterpreterCallInterpretingEvent", - "azure.ai.projects.models.ResponseCompletedEvent": "OpenAI.ResponseCompletedEvent", - "azure.ai.projects.models.ResponseContentPartAddedEvent": "OpenAI.ResponseContentPartAddedEvent", - 
"azure.ai.projects.models.ResponseContentPartDoneEvent": "OpenAI.ResponseContentPartDoneEvent", - "azure.ai.projects.models.ResponseConversation1": "OpenAI.Response.conversation.anonymous", - "azure.ai.projects.models.ResponseCreatedEvent": "OpenAI.ResponseCreatedEvent", - "azure.ai.projects.models.ResponseError": "OpenAI.ResponseError", - "azure.ai.projects.models.ResponseErrorEvent": "OpenAI.ResponseErrorEvent", - "azure.ai.projects.models.ResponseFailedEvent": "OpenAI.ResponseFailedEvent", - "azure.ai.projects.models.ResponseFileSearchCallCompletedEvent": "OpenAI.ResponseFileSearchCallCompletedEvent", - "azure.ai.projects.models.ResponseFileSearchCallInProgressEvent": "OpenAI.ResponseFileSearchCallInProgressEvent", - "azure.ai.projects.models.ResponseFileSearchCallSearchingEvent": "OpenAI.ResponseFileSearchCallSearchingEvent", - "azure.ai.projects.models.ResponseFunctionCallArgumentsDeltaEvent": "OpenAI.ResponseFunctionCallArgumentsDeltaEvent", - "azure.ai.projects.models.ResponseFunctionCallArgumentsDoneEvent": "OpenAI.ResponseFunctionCallArgumentsDoneEvent", - "azure.ai.projects.models.ResponseImageGenCallCompletedEvent": "OpenAI.ResponseImageGenCallCompletedEvent", - "azure.ai.projects.models.ResponseImageGenCallGeneratingEvent": "OpenAI.ResponseImageGenCallGeneratingEvent", - "azure.ai.projects.models.ResponseImageGenCallInProgressEvent": "OpenAI.ResponseImageGenCallInProgressEvent", - "azure.ai.projects.models.ResponseImageGenCallPartialImageEvent": "OpenAI.ResponseImageGenCallPartialImageEvent", - "azure.ai.projects.models.ResponseIncompleteDetails1": "OpenAI.Response.incomplete_details.anonymous", - "azure.ai.projects.models.ResponseIncompleteEvent": "OpenAI.ResponseIncompleteEvent", - "azure.ai.projects.models.ResponseInProgressEvent": "OpenAI.ResponseInProgressEvent", - "azure.ai.projects.models.ResponseMCPCallArgumentsDeltaEvent": "OpenAI.ResponseMCPCallArgumentsDeltaEvent", - "azure.ai.projects.models.ResponseMCPCallArgumentsDoneEvent": 
"OpenAI.ResponseMCPCallArgumentsDoneEvent", - "azure.ai.projects.models.ResponseMCPCallCompletedEvent": "OpenAI.ResponseMCPCallCompletedEvent", - "azure.ai.projects.models.ResponseMCPCallFailedEvent": "OpenAI.ResponseMCPCallFailedEvent", - "azure.ai.projects.models.ResponseMCPCallInProgressEvent": "OpenAI.ResponseMCPCallInProgressEvent", - "azure.ai.projects.models.ResponseMCPListToolsCompletedEvent": "OpenAI.ResponseMCPListToolsCompletedEvent", - "azure.ai.projects.models.ResponseMCPListToolsFailedEvent": "OpenAI.ResponseMCPListToolsFailedEvent", - "azure.ai.projects.models.ResponseMCPListToolsInProgressEvent": "OpenAI.ResponseMCPListToolsInProgressEvent", - "azure.ai.projects.models.ResponseOutputItemAddedEvent": "OpenAI.ResponseOutputItemAddedEvent", - "azure.ai.projects.models.ResponseOutputItemDoneEvent": "OpenAI.ResponseOutputItemDoneEvent", - "azure.ai.projects.models.ResponsePromptVariables": "OpenAI.ResponsePromptVariables", - "azure.ai.projects.models.ResponseQueuedEvent": "OpenAI.ResponseQueuedEvent", - "azure.ai.projects.models.ResponseReasoningDeltaEvent": "OpenAI.ResponseReasoningDeltaEvent", - "azure.ai.projects.models.ResponseReasoningDoneEvent": "OpenAI.ResponseReasoningDoneEvent", - "azure.ai.projects.models.ResponseReasoningSummaryDeltaEvent": "OpenAI.ResponseReasoningSummaryDeltaEvent", - "azure.ai.projects.models.ResponseReasoningSummaryDoneEvent": "OpenAI.ResponseReasoningSummaryDoneEvent", - "azure.ai.projects.models.ResponseReasoningSummaryPartAddedEvent": "OpenAI.ResponseReasoningSummaryPartAddedEvent", - "azure.ai.projects.models.ResponseReasoningSummaryPartDoneEvent": "OpenAI.ResponseReasoningSummaryPartDoneEvent", - "azure.ai.projects.models.ResponseReasoningSummaryTextDeltaEvent": "OpenAI.ResponseReasoningSummaryTextDeltaEvent", - "azure.ai.projects.models.ResponseReasoningSummaryTextDoneEvent": "OpenAI.ResponseReasoningSummaryTextDoneEvent", - "azure.ai.projects.models.ResponseRefusalDeltaEvent": "OpenAI.ResponseRefusalDeltaEvent", - 
"azure.ai.projects.models.ResponseRefusalDoneEvent": "OpenAI.ResponseRefusalDoneEvent", - "azure.ai.projects.models.ResponsesMessageItemParam": "OpenAI.ResponsesMessageItemParam", - "azure.ai.projects.models.ResponsesAssistantMessageItemParam": "OpenAI.ResponsesAssistantMessageItemParam", - "azure.ai.projects.models.ResponsesMessageItemResource": "OpenAI.ResponsesMessageItemResource", - "azure.ai.projects.models.ResponsesAssistantMessageItemResource": "OpenAI.ResponsesAssistantMessageItemResource", - "azure.ai.projects.models.ResponsesDeveloperMessageItemParam": "OpenAI.ResponsesDeveloperMessageItemParam", - "azure.ai.projects.models.ResponsesDeveloperMessageItemResource": "OpenAI.ResponsesDeveloperMessageItemResource", - "azure.ai.projects.models.ResponsesSystemMessageItemParam": "OpenAI.ResponsesSystemMessageItemParam", - "azure.ai.projects.models.ResponsesSystemMessageItemResource": "OpenAI.ResponsesSystemMessageItemResource", - "azure.ai.projects.models.ResponsesUserMessageItemParam": "OpenAI.ResponsesUserMessageItemParam", - "azure.ai.projects.models.ResponsesUserMessageItemResource": "OpenAI.ResponsesUserMessageItemResource", - "azure.ai.projects.models.ResponseText": "OpenAI.Response.text.anonymous", - "azure.ai.projects.models.ResponseTextDeltaEvent": "OpenAI.ResponseTextDeltaEvent", - "azure.ai.projects.models.ResponseTextDoneEvent": "OpenAI.ResponseTextDoneEvent", - "azure.ai.projects.models.ResponseTextFormatConfiguration": "OpenAI.ResponseTextFormatConfiguration", - "azure.ai.projects.models.ResponseTextFormatConfigurationJsonObject": "OpenAI.ResponseTextFormatConfigurationJsonObject", - "azure.ai.projects.models.ResponseTextFormatConfigurationJsonSchema": "OpenAI.ResponseTextFormatConfigurationJsonSchema", - "azure.ai.projects.models.ResponseTextFormatConfigurationText": "OpenAI.ResponseTextFormatConfigurationText", - "azure.ai.projects.models.ResponseUsage": "OpenAI.ResponseUsage", - "azure.ai.projects.models.ResponseWebSearchCallCompletedEvent": 
"OpenAI.ResponseWebSearchCallCompletedEvent", - "azure.ai.projects.models.ResponseWebSearchCallInProgressEvent": "OpenAI.ResponseWebSearchCallInProgressEvent", - "azure.ai.projects.models.ResponseWebSearchCallSearchingEvent": "OpenAI.ResponseWebSearchCallSearchingEvent", + "azure.ai.projects.models.ResponseUsageInputTokensDetails": "OpenAI.ResponseUsageInputTokensDetails", + "azure.ai.projects.models.ResponseUsageOutputTokensDetails": "OpenAI.ResponseUsageOutputTokensDetails", "azure.ai.projects.models.SASCredentials": "Azure.AI.Projects.SASCredentials", "azure.ai.projects.models.Schedule": "Azure.AI.Projects.Schedule", "azure.ai.projects.models.ScheduleRun": "Azure.AI.Projects.ScheduleRun", - "azure.ai.projects.models.SharepointAgentTool": "Azure.AI.Projects.SharepointAgentTool", + "azure.ai.projects.models.Screenshot": "OpenAI.Screenshot", + "azure.ai.projects.models.Scroll": "OpenAI.Scroll", "azure.ai.projects.models.SharepointGroundingToolParameters": "Azure.AI.Projects.SharepointGroundingToolParameters", + "azure.ai.projects.models.SharepointPreviewTool": "Azure.AI.Projects.SharepointPreviewTool", "azure.ai.projects.models.StructuredInputDefinition": "Azure.AI.Projects.StructuredInputDefinition", "azure.ai.projects.models.StructuredOutputDefinition": "Azure.AI.Projects.StructuredOutputDefinition", "azure.ai.projects.models.StructuredOutputsItemResource": "Azure.AI.Projects.StructuredOutputsItemResource", + "azure.ai.projects.models.Summary": "OpenAI.Summary", "azure.ai.projects.models.TaxonomyCategory": "Azure.AI.Projects.TaxonomyCategory", "azure.ai.projects.models.TaxonomySubCategory": "Azure.AI.Projects.TaxonomySubCategory", - "azure.ai.projects.models.ToolChoiceObject": "OpenAI.ToolChoiceObject", - "azure.ai.projects.models.ToolChoiceObjectCodeInterpreter": "OpenAI.ToolChoiceObjectCodeInterpreter", - "azure.ai.projects.models.ToolChoiceObjectComputer": "OpenAI.ToolChoiceObjectComputer", - "azure.ai.projects.models.ToolChoiceObjectFileSearch": 
"OpenAI.ToolChoiceObjectFileSearch", - "azure.ai.projects.models.ToolChoiceObjectFunction": "OpenAI.ToolChoiceObjectFunction", - "azure.ai.projects.models.ToolChoiceObjectImageGen": "OpenAI.ToolChoiceObjectImageGen", - "azure.ai.projects.models.ToolChoiceObjectMCP": "OpenAI.ToolChoiceObjectMCP", - "azure.ai.projects.models.ToolChoiceObjectWebSearch": "OpenAI.ToolChoiceObjectWebSearch", + "azure.ai.projects.models.TextResponseFormatConfiguration": "OpenAI.TextResponseFormatConfiguration", + "azure.ai.projects.models.TextResponseFormatConfigurationResponseFormatJsonObject": "OpenAI.TextResponseFormatConfigurationResponseFormatJsonObject", + "azure.ai.projects.models.TextResponseFormatConfigurationResponseFormatText": "OpenAI.TextResponseFormatConfigurationResponseFormatText", + "azure.ai.projects.models.TextResponseFormatJsonSchema": "OpenAI.TextResponseFormatJsonSchema", "azure.ai.projects.models.ToolDescription": "Azure.AI.Projects.ToolDescription", "azure.ai.projects.models.ToolProjectConnection": "Azure.AI.Projects.ToolProjectConnection", "azure.ai.projects.models.TopLogProb": "OpenAI.TopLogProb", + "azure.ai.projects.models.Type": "OpenAI.Type", + "azure.ai.projects.models.UrlCitationBody": "OpenAI.UrlCitationBody", "azure.ai.projects.models.UserProfileMemoryItem": "Azure.AI.Projects.UserProfileMemoryItem", "azure.ai.projects.models.VectorStoreFileAttributes": "OpenAI.VectorStoreFileAttributes", - "azure.ai.projects.models.WebSearchAction": "OpenAI.WebSearchAction", + "azure.ai.projects.models.Wait": "OpenAI.Wait", "azure.ai.projects.models.WebSearchActionFind": "OpenAI.WebSearchActionFind", "azure.ai.projects.models.WebSearchActionOpenPage": "OpenAI.WebSearchActionOpenPage", "azure.ai.projects.models.WebSearchActionSearch": "OpenAI.WebSearchActionSearch", "azure.ai.projects.models.WebSearchActionSearchSources": "OpenAI.WebSearchActionSearchSources", + "azure.ai.projects.models.WebSearchApproximateLocation": "OpenAI.WebSearchApproximateLocation", + 
"azure.ai.projects.models.WebSearchConfiguration": "Azure.AI.Projects.WebSearchConfiguration", "azure.ai.projects.models.WebSearchPreviewTool": "OpenAI.WebSearchPreviewTool", - "azure.ai.projects.models.WebSearchToolCallItemParam": "OpenAI.WebSearchToolCallItemParam", - "azure.ai.projects.models.WebSearchToolCallItemResource": "OpenAI.WebSearchToolCallItemResource", + "azure.ai.projects.models.WebSearchTool": "OpenAI.WebSearchTool", + "azure.ai.projects.models.WebSearchToolFilters": "OpenAI.WebSearchToolFilters", "azure.ai.projects.models.WeeklyRecurrenceSchedule": "Azure.AI.Projects.WeeklyRecurrenceSchedule", "azure.ai.projects.models.WorkflowActionOutputItemResource": "Azure.AI.Projects.WorkflowActionOutputItemResource", "azure.ai.projects.models.WorkflowAgentDefinition": "Azure.AI.Projects.WorkflowAgentDefinition", @@ -340,21 +311,36 @@ "azure.ai.projects.models.AgentProtocol": "Azure.AI.Projects.AgentProtocol", "azure.ai.projects.models.ToolType": "OpenAI.ToolType", "azure.ai.projects.models.AzureAISearchQueryType": "Azure.AI.Projects.AzureAISearchQueryType", + "azure.ai.projects.models.ContainerMemoryLimit": "OpenAI.ContainerMemoryLimit", + "azure.ai.projects.models.ComputerEnvironment": "OpenAI.ComputerEnvironment", + "azure.ai.projects.models.CustomToolParamFormatType": "OpenAI.CustomToolParamFormatType", + "azure.ai.projects.models.GrammarSyntax1": "OpenAI.GrammarSyntax1", + "azure.ai.projects.models.RankerVersionType": "OpenAI.RankerVersionType", + "azure.ai.projects.models.InputFidelity": "OpenAI.InputFidelity", "azure.ai.projects.models.OpenApiAuthType": "Azure.AI.Projects.OpenApiAuthType", - "azure.ai.projects.models.LocationType": "OpenAI.LocationType", - "azure.ai.projects.models.ReasoningEffort": "OpenAI.ReasoningEffort", - "azure.ai.projects.models.ResponseTextFormatConfigurationType": "OpenAI.ResponseTextFormatConfigurationType", + "azure.ai.projects.models.SearchContextSize": "OpenAI.SearchContextSize", + 
"azure.ai.projects.models.TextResponseFormatConfigurationType": "OpenAI.TextResponseFormatConfigurationType", + "azure.ai.projects.models.PageOrder": "Azure.AI.Projects.PageOrder", + "azure.ai.projects.models.ContainerLogKind": "Azure.AI.Projects.ContainerLogKind", "azure.ai.projects.models.MemoryStoreKind": "Azure.AI.Projects.MemoryStoreKind", "azure.ai.projects.models.MemoryItemKind": "Azure.AI.Projects.MemoryItemKind", - "azure.ai.projects.models.ItemType": "OpenAI.ItemType", - "azure.ai.projects.models.CodeInterpreterOutputType": "OpenAI.CodeInterpreterOutputType", + "azure.ai.projects.models.InputItemType": "OpenAI.InputItemType", + "azure.ai.projects.models.ApplyPatchCallStatusParam": "OpenAI.ApplyPatchCallStatusParam", + "azure.ai.projects.models.ApplyPatchOperationParamType": "OpenAI.ApplyPatchOperationParamType", + "azure.ai.projects.models.ApplyPatchCallOutputStatusParam": "OpenAI.ApplyPatchCallOutputStatusParam", "azure.ai.projects.models.ComputerActionType": "OpenAI.ComputerActionType", - "azure.ai.projects.models.ComputerToolCallOutputItemOutputType": "OpenAI.ComputerToolCallOutputItemOutputType", - "azure.ai.projects.models.ResponsesMessageRole": "OpenAI.ResponsesMessageRole", - "azure.ai.projects.models.ItemContentType": "OpenAI.ItemContentType", + "azure.ai.projects.models.ClickButtonType": "OpenAI.ClickButtonType", + "azure.ai.projects.models.FunctionCallItemStatus": "OpenAI.FunctionCallItemStatus", + "azure.ai.projects.models.FunctionAndCustomToolCallOutputType": "OpenAI.FunctionAndCustomToolCallOutputType", + "azure.ai.projects.models.ImageDetail": "OpenAI.ImageDetail", + "azure.ai.projects.models.DetailEnum": "OpenAI.DetailEnum", + "azure.ai.projects.models.MCPToolCallStatus": "OpenAI.MCPToolCallStatus", + "azure.ai.projects.models.InputContentType": "OpenAI.InputContentType", + "azure.ai.projects.models.OutputMessageContentType": "OpenAI.OutputMessageContentType", "azure.ai.projects.models.AnnotationType": "OpenAI.AnnotationType", - 
"azure.ai.projects.models.ReasoningItemSummaryPartType": "OpenAI.ReasoningItemSummaryPartType", - "azure.ai.projects.models.WebSearchActionType": "OpenAI.WebSearchActionType", + "azure.ai.projects.models.OutputContentType": "OpenAI.OutputContentType", + "azure.ai.projects.models.FunctionShellCallItemStatus": "OpenAI.FunctionShellCallItemStatus", + "azure.ai.projects.models.FunctionShellCallOutputOutcomeParamType": "OpenAI.FunctionShellCallOutputOutcomeParamType", "azure.ai.projects.models.MemoryOperationKind": "Azure.AI.Projects.MemoryOperationKind", "azure.ai.projects.models.ConnectionType": "Azure.AI.Projects.ConnectionType", "azure.ai.projects.models.CredentialType": "Azure.AI.Projects.CredentialType", @@ -382,11 +368,12 @@ "azure.ai.projects.models.DayOfWeek": "Azure.AI.Projects.DayOfWeek", "azure.ai.projects.models.ScheduleTaskType": "Azure.AI.Projects.ScheduleTaskType", "azure.ai.projects.models.MemoryStoreUpdateStatus": "Azure.AI.Projects.MemoryStoreUpdateStatus", - "azure.ai.projects.models.ServiceTier": "OpenAI.ServiceTier", - "azure.ai.projects.models.ToolChoiceOptions": "OpenAI.ToolChoiceOptions", - "azure.ai.projects.models.ToolChoiceObjectType": "OpenAI.ToolChoiceObjectType", - "azure.ai.projects.models.ResponseErrorCode": "OpenAI.ResponseErrorCode", - "azure.ai.projects.models.ResponseStreamEventType": "OpenAI.ResponseStreamEventType", + "azure.ai.projects.models.FunctionShellCallOutputOutcomeType": "OpenAI.FunctionShellCallOutputOutcomeType", + "azure.ai.projects.models.ApplyPatchFileOperationType": "OpenAI.ApplyPatchFileOperationType", + "azure.ai.projects.models.ItemResourceType": "OpenAI.ItemResourceType", + "azure.ai.projects.models.ApplyPatchCallStatus": "OpenAI.ApplyPatchCallStatus", + "azure.ai.projects.models.ApplyPatchCallOutputStatus": "OpenAI.ApplyPatchCallOutputStatus", + "azure.ai.projects.models.LocalShellCallStatus": "OpenAI.LocalShellCallStatus", "azure.ai.projects.operations.AgentsOperations.get": "Azure.AI.Projects.Agents.getAgent", 
"azure.ai.projects.aio.operations.AgentsOperations.get": "Azure.AI.Projects.Agents.getAgent", "azure.ai.projects.operations.AgentsOperations.create": "Azure.AI.Projects.Agents.createAgent", @@ -411,6 +398,8 @@ "azure.ai.projects.aio.operations.AgentsOperations.delete_version": "Azure.AI.Projects.Agents.deleteAgentVersion", "azure.ai.projects.operations.AgentsOperations.list_versions": "Azure.AI.Projects.Agents.listAgentVersions", "azure.ai.projects.aio.operations.AgentsOperations.list_versions": "Azure.AI.Projects.Agents.listAgentVersions", + "azure.ai.projects.operations.AgentsOperations.stream_agent_container_logs": "Azure.AI.Projects.Agents.streamAgentContainerLogs", + "azure.ai.projects.aio.operations.AgentsOperations.stream_agent_container_logs": "Azure.AI.Projects.Agents.streamAgentContainerLogs", "azure.ai.projects.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", "azure.ai.projects.aio.operations.MemoryStoresOperations.create": "Azure.AI.Projects.MemoryStores.createMemoryStore", "azure.ai.projects.operations.MemoryStoresOperations.update": "Azure.AI.Projects.MemoryStores.updateMemoryStore", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py index 5e23b3911701..380d39c39ab4 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py @@ -10,4 +10,5 @@ if TYPE_CHECKING: from . 
import models as _models +ComparisonFilterValueItems = Union[str, float] Filters = Union["_models.ComparisonFilter", "_models.CompoundFilter"] diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py index 03b8c4ce34a0..e0637b7cfdc4 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py @@ -37,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -171,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. 
@@ -202,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -256,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -315,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -353,9 +371,39 @@ def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) 
and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -483,6 +531,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -638,6 +688,10 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: if not rf._rest_name_input: rf._rest_name_input = attr cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._backcompat_attr_to_rest_field: dict[str, _RestField] = { + Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf + for attr, rf in cls._attr_to_rest_field.items() + } cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") return super().__new__(cls) @@ -647,6 +701,16 @@ def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: if hasattr(base, "__mapping__"): base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + @classmethod + def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str: + rest_field_obj = attr_to_rest_field.get(attr_name) # pylint: disable=protected-access + if 
rest_field_obj is None: + return attr_name + original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None) # pylint: disable=protected-access + if original_tsp_name: + return original_tsp_name + return attr_name + @classmethod def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: for v in cls.__dict__.values(): @@ -758,6 +822,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -767,6 +839,19 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) @@ -817,16 +902,16 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? 
try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -972,6 +1057,7 @@ def _failsafe_deserialize_xml( return None +# pylint: disable=too-many-instance-attributes class _RestField: def __init__( self, @@ -984,6 +1070,7 @@ def __init__( format: typing.Optional[str] = None, is_multipart_file_input: bool = False, xml: typing.Optional[dict[str, typing.Any]] = None, + original_tsp_name: typing.Optional[str] = None, ): self._type = type self._rest_name_input = name @@ -995,10 +1082,15 @@ def __init__( self._format = format self._is_multipart_file_input = is_multipart_file_input self._xml = xml if xml is not None else {} + self._original_tsp_name = original_tsp_name @property def _class_type(self) -> typing.Any: - return getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result @property def _rest_name(self) -> str: @@ -1009,14 +1101,37 @@ def 
_rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -1046,6 +1161,7 @@ def rest_field( format: typing.Optional[str] = None, is_multipart_file_input: bool = False, xml: typing.Optional[dict[str, typing.Any]] = None, + original_tsp_name: typing.Optional[str] = None, ) -> typing.Any: return _RestField( name=name, @@ -1055,6 +1171,7 @@ def rest_field( format=format, is_multipart_file_input=is_multipart_file_input, xml=xml, + original_tsp_name=original_tsp_name, ) @@ -1184,7 +1301,7 @@ def _get_wrapped_element( 
_get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1197,7 +1314,9 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: if prefix and ns: ET.register_namespace(prefix, ns) if ns: diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/serialization.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/serialization.py index 45a3e44e45cb..81ec1de5922b 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/serialization.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_utils/serialization.py @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. 
+ :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index 0bb1dcad35d4..c5bbefb67fe9 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -35,7 +35,6 @@ from ... 
import models as _models from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from ..._utils.serialization import Deserializer, Serializer -from ..._validation import api_version_validation from ...operations._operations import ( build_agents_create_from_manifest_request, build_agents_create_request, @@ -47,6 +46,7 @@ build_agents_get_version_request, build_agents_list_request, build_agents_list_versions_request, + build_agents_stream_agent_container_logs_request, build_agents_update_from_manifest_request, build_agents_update_request, build_connections_get_request, @@ -129,11 +129,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, agent_name: str, **kwargs: Any) -> _models.AgentDetails: """Retrieves the agent. 
@@ -270,11 +265,6 @@ async def create( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -457,11 +447,6 @@ async def update( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def update( self, agent_name: str, @@ -640,11 +625,6 @@ async def create_from_manifest( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_from_manifest( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -844,11 +824,6 @@ async def update_from_manifest( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def update_from_manifest( self, agent_name: str, @@ -963,11 +938,6 @@ async def update_from_manifest( return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete(self, agent_name: str, **kwargs: Any) -> _models.DeleteAgentResponse: """Deletes an agent. 
@@ -1032,17 +1002,12 @@ async def delete(self, agent_name: str, **kwargs: Any) -> _models.DeleteAgentRes return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "kind", "limit", "order", "after", "before", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, kind: Optional[Union[str, _models.AgentKind]] = None, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> AsyncItemPaged["_models.AgentDetails"]: @@ -1057,9 +1022,8 @@ def list( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. 
For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -1216,11 +1180,6 @@ async def create_version( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_version( self, agent_name: str, @@ -1417,11 +1376,6 @@ async def create_version_from_manifest( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_version_from_manifest( self, agent_name: str, @@ -1539,11 +1493,6 @@ async def create_version_from_manifest( return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "agent_version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get_version(self, agent_name: str, agent_version: str, **kwargs: Any) -> _models.AgentVersionDetails: """Retrieves a specific version of an agent. 
@@ -1611,11 +1560,6 @@ async def get_version(self, agent_name: str, agent_version: str, **kwargs: Any) return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "agent_version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete_version( self, agent_name: str, agent_version: str, **kwargs: Any ) -> _models.DeleteAgentVersionResponse: @@ -1686,19 +1630,12 @@ async def delete_version( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "agent_name", "limit", "order", "after", "before", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list_versions( self, agent_name: str, *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> AsyncItemPaged["_models.AgentVersionDetails"]: @@ -1712,9 +1649,8 @@ def list_versions( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. 
For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -1784,6 +1720,106 @@ async def get_next(_continuation_token=None): return AsyncItemPaged(get_next, extract_data) + @distributed_trace_async + async def stream_agent_container_logs( + self, + agent_name: str, + agent_version: str, + *, + kind: Optional[Union[str, _models.ContainerLogKind]] = None, + replica_name: Optional[str] = None, + tail: Optional[int] = None, + **kwargs: Any + ) -> None: + """Container log entry streamed from the container as text chunks. + Each chunk is a UTF-8 string that may be either a plain text log line + or a JSON-formatted log entry, depending on the type of container log being streamed. + Clients should treat each chunk as opaque text and, if needed, attempt + to parse it as JSON based on their logging requirements. + + For system logs, the format is JSON with the following structure: + {"TimeStamp":"2025-12-15T16:51:33Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting + to the events + collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2025-12-15T16:51:34Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully + connected to events + server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} + + For console logs, the format is plain text as emitted by the container's stdout/stderr. + 2025-12-15T08:43:48.72656 Connecting to the container 'agent-container'... 
+ 2025-12-15T08:43:48.75451 Successfully Connected to container: 'agent-container' [Revision: + 'je90fe655aa742ef9a188b9fd14d6764--7tca06b', Replica: + 'je90fe655aa742ef9a188b9fd14d6764--7tca06b-6898b9c89f-mpkjc'] + 2025-12-15T08:33:59.0671054Z stdout F INFO: 127.0.0.1:42588 - "GET /readiness HTTP/1.1" 200 + OK + 2025-12-15T08:34:29.0649033Z stdout F INFO: 127.0.0.1:60246 - "GET /readiness HTTP/1.1" 200 + OK + 2025-12-15T08:34:59.0644467Z stdout F INFO: 127.0.0.1:43994 - "GET /readiness HTTP/1.1" 200 + OK. + + :param agent_name: The name of the agent. Required. + :type agent_name: str + :param agent_version: The version of the agent. Required. + :type agent_version: str + :keyword kind: console returns container stdout/stderr, system returns container app event + stream. defaults to console. Known values are: "console" and "system". Default value is None. + :paramtype kind: str or ~azure.ai.projects.models.ContainerLogKind + :keyword replica_name: When omitted, the server chooses the first replica for console logs. + Required to target a specific replica. Default value is None. + :paramtype replica_name: str + :keyword tail: Number of trailing lines returned. Enforced to 1-300. Defaults to 20. Default + value is None. 
+ :paramtype tail: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_agents_stream_agent_container_logs_request( + agent_name=agent_name, + agent_version=agent_version, + kind=kind, + replica_name=replica_name, + tail=tail, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + class MemoryStoresOperations: """ @@ -1865,11 +1901,6 @@ async def create( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -2031,11 +2062,6 @@ async def update( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - 
params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def update( self, name: str, @@ -2128,11 +2154,6 @@ async def update( return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: """Retrieve a memory store. @@ -2197,16 +2218,11 @@ async def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "limit", "order", "after", "before", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> AsyncItemPaged["_models.MemoryStoreDetails"]: @@ -2218,9 +2234,8 @@ def list( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. 
For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -2290,11 +2305,6 @@ async def get_next(_continuation_token=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: """Delete a memory store. @@ -2365,7 +2375,7 @@ async def search_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_search_id: Optional[str] = None, options: Optional[_models.MemorySearchOptions] = None, **kwargs: Any @@ -2381,7 +2391,7 @@ async def search_memories( Default value is "application/json". :paramtype content_type: str :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_search_id: The unique ID of the previous search request, enabling incremental memory search from where the last operation left off. Default value is None. 
:paramtype previous_search_id: str @@ -2429,18 +2439,13 @@ async def search_memories( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def search_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_search_id: Optional[str] = None, options: Optional[_models.MemorySearchOptions] = None, **kwargs: Any @@ -2455,7 +2460,7 @@ async def search_memories( Required. :paramtype scope: str :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_search_id: The unique ID of the previous search request, enabling incremental memory search from where the last operation left off. Default value is None. 
:paramtype previous_search_id: str @@ -2539,18 +2544,13 @@ async def search_memories( return deserialized # type: ignore - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def _update_memories_initial( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -2635,7 +2635,7 @@ async def _begin_update_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -2650,18 +2650,13 @@ async def _begin_update_memories( ) -> AsyncLROPoller[_models.MemoryStoreUpdateCompletedResult]: ... @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def _begin_update_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -2676,7 +2671,7 @@ async def _begin_update_memories( Required. :paramtype scope: str :keyword items: Conversation items from which to extract memories. Default value is None. 
- :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. :paramtype previous_update_id: str @@ -2812,11 +2807,6 @@ async def delete_scope( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete_scope( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, **kwargs: Any ) -> _models.MemoryStoreDeleteScopeResult: @@ -4482,11 +4472,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: """Get a redteam by name. @@ -4552,11 +4537,6 @@ async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) def list(self, **kwargs: Any) -> AsyncItemPaged["_models.RedTeam"]: """List a redteam by name. 
@@ -4685,11 +4665,6 @@ async def create( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) async def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: """Creates a redteam run. @@ -4778,11 +4753,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: """Get an evaluation rule. @@ -4848,11 +4818,6 @@ async def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete(self, id: str, **kwargs: Any) -> None: """Delete an evaluation rule. 
@@ -4960,11 +4925,6 @@ async def create_or_update( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_or_update( self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationRule: @@ -5040,13 +5000,6 @@ async def create_or_update( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "action_type", "agent_name", "enabled", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, @@ -5164,11 +5117,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: """Get an evaluation run by name. 
@@ -5234,13 +5182,6 @@ async def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "input_name", "input_type", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any ) -> AsyncItemPaged["_models.EvaluationTaxonomy"]: @@ -5331,11 +5272,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete(self, name: str, **kwargs: Any) -> None: """Delete an evaluation taxonomy by name. @@ -5443,11 +5379,6 @@ async def create( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create( self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationTaxonomy: @@ -5577,11 +5508,6 @@ async def update( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def update( self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationTaxonomy: @@ -5675,11 +5601,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace 
- @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "type", "limit", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_versions( self, name: str, @@ -5781,11 +5702,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "type", "limit", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_latest_versions( self, *, @@ -5883,11 +5799,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.EvaluatorVersion: """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if the EvaluatorVersion does not exist. @@ -5952,11 +5863,6 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.E return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: """Delete the specific version of the EvaluatorVersion. The service returns 204 No Content if the EvaluatorVersion was deleted successfully or if the EvaluatorVersion does not exist. 
@@ -6068,11 +5974,6 @@ async def create_version( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_version( self, name: str, evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluatorVersion: @@ -6220,11 +6121,6 @@ async def update_version( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def update_version( self, name: str, @@ -6376,19 +6272,6 @@ async def generate( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": [ - "api_version", - "repeatability_request_id", - "repeatability_first_sent", - "content_type", - "accept", - ] - }, - api_versions_list=["2025-11-15-preview"], - ) async def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: Any) -> _models.Insight: """Generate Insights. @@ -6459,13 +6342,6 @@ async def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwa return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "id", "include_coordinates", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: """Get a specific insight by Id. 
@@ -6535,22 +6411,6 @@ async def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kw return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": [ - "api_version", - "type", - "eval_id", - "run_id", - "agent_name", - "include_coordinates", - "client_request_id", - "accept", - ] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, @@ -6677,11 +6537,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) async def delete(self, id: str, **kwargs: Any) -> None: """Delete a schedule. @@ -6735,11 +6590,6 @@ async def delete(self, id: str, **kwargs: Any) -> None: return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get(self, id: str, **kwargs: Any) -> _models.Schedule: """Get a schedule by id. @@ -6805,11 +6655,6 @@ async def get(self, id: str, **kwargs: Any) -> _models.Schedule: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Schedule"]: """List all schedules. 
@@ -6946,11 +6791,6 @@ async def create_or_update( """ @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def create_or_update( self, id: str, schedule: Union[_models.Schedule, JSON, IO[bytes]], **kwargs: Any ) -> _models.Schedule: @@ -7026,11 +6866,6 @@ async def create_or_update( return deserialized # type: ignore @distributed_trace_async - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "schedule_id", "run_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) async def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.ScheduleRun: """Get a schedule run by id. @@ -7094,11 +6929,6 @@ async def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_runs(self, id: str, **kwargs: Any) -> AsyncItemPaged["_models.ScheduleRun"]: """List all schedule runs. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py index 61b4ac43a42f..4f1343541de5 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_memories_async.py @@ -14,8 +14,8 @@ from ... 
import models as _models from ...models import ( MemoryStoreOperationUsage, - MemoryStoreOperationUsageInputTokensDetails, - MemoryStoreOperationUsageOutputTokensDetails, + ResponseUsageInputTokensDetails, + ResponseUsageOutputTokensDetails, MemoryStoreUpdateCompletedResult, AsyncUpdateMemoriesLROPoller, AsyncUpdateMemoriesLROPollingMethod, @@ -34,7 +34,7 @@ async def begin_update_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any, @@ -50,7 +50,7 @@ async def begin_update_memories( Default value is "application/json". :paramtype content_type: str :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. :paramtype previous_update_id: str @@ -119,7 +119,7 @@ async def begin_update_memories( body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any, @@ -134,7 +134,7 @@ async def begin_update_memories( Required. :paramtype scope: str :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. 
:paramtype previous_update_id: str @@ -193,9 +193,9 @@ def get_long_running_output(pipeline_response): usage = MemoryStoreOperationUsage( embedding_tokens=0, input_tokens=0, - input_tokens_details=MemoryStoreOperationUsageInputTokensDetails(cached_tokens=0), + input_tokens_details=ResponseUsageInputTokensDetails(cached_tokens=0), output_tokens=0, - output_tokens_details=MemoryStoreOperationUsageOutputTokensDetails(reasoning_tokens=0), + output_tokens_details=ResponseUsageOutputTokensDetails(reasoning_tokens=0), total_tokens=0, ) deserialized = MemoryStoreUpdateCompletedResult(memory_operations=[], usage=usage) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a352198a33b9..2537d1bd8040 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -14,7 +14,7 @@ from ._models import ( # type: ignore - A2ATool, + A2APreviewTool, AISearchIndexResource, AgentClusterInsightResult, AgentClusterInsightsRequest, @@ -27,75 +27,69 @@ AgentVersionDetails, AgenticIdentityCredentials, Annotation, - AnnotationFileCitation, - AnnotationFilePath, - AnnotationUrlCitation, ApiErrorResponse, ApiKeyCredentials, + ApplyPatchCreateFileOperation, + ApplyPatchCreateFileOperationParam, + ApplyPatchDeleteFileOperation, + ApplyPatchDeleteFileOperationParam, + ApplyPatchFileOperation, + ApplyPatchOperationParam, + ApplyPatchToolParam, + ApplyPatchUpdateFileOperation, + ApplyPatchUpdateFileOperationParam, ApproximateLocation, AzureAIAgentTarget, - AzureAISearchAgentTool, AzureAISearchIndex, + AzureAISearchTool, AzureAISearchToolResource, - AzureFunctionAgentTool, AzureFunctionBinding, AzureFunctionDefinition, AzureFunctionDefinitionFunction, AzureFunctionStorageQueue, + AzureFunctionTool, AzureOpenAIModelConfiguration, BaseCredentials, - BingCustomSearchAgentTool, BingCustomSearchConfiguration, + 
BingCustomSearchPreviewTool, BingCustomSearchToolParameters, - BingGroundingAgentTool, BingGroundingSearchConfiguration, BingGroundingSearchToolParameters, + BingGroundingTool, BlobReference, BlobReferenceSasCredential, - BrowserAutomationAgentTool, + BrowserAutomationPreviewTool, BrowserAutomationToolConnectionParameters, BrowserAutomationToolParameters, CaptureStructuredOutputsTool, ChartCoordinate, ChatSummaryMemoryItem, + ClickParam, ClusterInsightResult, ClusterTokenUsage, CodeBasedEvaluatorDefinition, - CodeInterpreterOutput, + CodeInterpreterContainerAuto, CodeInterpreterOutputImage, CodeInterpreterOutputLogs, CodeInterpreterTool, - CodeInterpreterToolAuto, - CodeInterpreterToolCallItemParam, - CodeInterpreterToolCallItemResource, ComparisonFilter, CompoundFilter, ComputerAction, - ComputerActionClick, - ComputerActionDoubleClick, - ComputerActionDrag, - ComputerActionKeyPress, - ComputerActionMove, - ComputerActionScreenshot, - ComputerActionScroll, - ComputerActionTypeKeys, - ComputerActionWait, - ComputerToolCallItemParam, - ComputerToolCallItemResource, - ComputerToolCallOutputItemOutput, - ComputerToolCallOutputItemOutputComputerScreenshot, - ComputerToolCallOutputItemParam, - ComputerToolCallOutputItemResource, - ComputerToolCallSafetyCheck, + ComputerCallSafetyCheckParam, + ComputerScreenshotImage, ComputerUsePreviewTool, Connection, ContainerAppAgentDefinition, + ContainerFileCitationBody, ContinuousEvaluationRuleAction, - Coordinate, CosmosDBIndex, CreatedBy, CronTrigger, CustomCredential, + CustomGrammarFormatParam, + CustomTextFormatParam, + CustomToolParam, + CustomToolParamFormat, DailyRecurrenceSchedule, DatasetCredential, DatasetVersion, @@ -103,6 +97,10 @@ DeleteAgentVersionResponse, DeleteMemoryStoreResult, Deployment, + DoubleClickAction, + Drag, + DragPoint, + EasyInputMessage, EmbeddingConfiguration, EntraIDCredentials, Error, @@ -126,26 +124,68 @@ EvaluatorVersion, FabricDataAgentToolParameters, FieldMapping, + FileCitationBody, 
FileDatasetVersion, + FilePath, FileSearchTool, - FileSearchToolCallItemParam, - FileSearchToolCallItemParamResult, - FileSearchToolCallItemResource, + FileSearchToolCallResults, FolderDatasetVersion, + FunctionAndCustomToolCallOutput, + FunctionAndCustomToolCallOutputInputFileContent, + FunctionAndCustomToolCallOutputInputImageContent, + FunctionAndCustomToolCallOutputInputTextContent, + FunctionShellAction, + FunctionShellActionParam, + FunctionShellCallOutputContent, + FunctionShellCallOutputContentParam, + FunctionShellCallOutputExitOutcome, + FunctionShellCallOutputExitOutcomeParam, + FunctionShellCallOutputOutcome, + FunctionShellCallOutputOutcomeParam, + FunctionShellCallOutputTimeoutOutcome, + FunctionShellCallOutputTimeoutOutcomeParam, + FunctionShellToolParam, FunctionTool, - FunctionToolCallItemParam, - FunctionToolCallItemResource, - FunctionToolCallOutputItemParam, - FunctionToolCallOutputItemResource, HostedAgentDefinition, HourlyRecurrenceSchedule, HumanEvaluationRuleAction, + HybridSearchOptions, ImageBasedHostedAgentDefinition, ImageGenTool, - ImageGenToolCallItemParam, - ImageGenToolCallItemResource, ImageGenToolInputImageMask, Index, + InputContent, + InputContentInputFileContent, + InputContentInputImageContent, + InputContentInputTextContent, + InputFileContentParam, + InputImageContentParamAutoParam, + InputItem, + InputItemApplyPatchToolCallItemParam, + InputItemApplyPatchToolCallOutputItemParam, + InputItemCodeInterpreterToolCall, + InputItemCompactionSummaryItemParam, + InputItemComputerCallOutputItemParam, + InputItemComputerToolCall, + InputItemCustomToolCall, + InputItemCustomToolCallOutput, + InputItemFileSearchToolCall, + InputItemFunctionCallOutputItemParam, + InputItemFunctionShellCallItemParam, + InputItemFunctionShellCallOutputItemParam, + InputItemFunctionToolCall, + InputItemImageGenToolCall, + InputItemLocalShellToolCall, + InputItemLocalShellToolCallOutput, + InputItemMcpApprovalRequest, + InputItemMcpApprovalResponse, + 
InputItemMcpListTools, + InputItemMcpToolCall, + InputItemOutputMessage, + InputItemReasoningItem, + InputItemWebSearchToolCall, + InputMessageResource, + InputTextContentParam, Insight, InsightCluster, InsightModelConfiguration, @@ -155,46 +195,43 @@ InsightScheduleTask, InsightSummary, InsightsMetadata, - ItemContent, - ItemContentInputAudio, - ItemContentInputFile, - ItemContentInputImage, - ItemContentInputText, - ItemContentOutputAudio, - ItemContentOutputText, - ItemContentRefusal, - ItemParam, - ItemReferenceItemParam, + ItemReferenceParam, ItemResource, + ItemResourceApplyPatchToolCall, + ItemResourceApplyPatchToolCallOutput, + ItemResourceCodeInterpreterToolCall, + ItemResourceComputerToolCall, + ItemResourceComputerToolCallOutputResource, + ItemResourceFileSearchToolCall, + ItemResourceFunctionShellCall, + ItemResourceFunctionShellCallOutput, + ItemResourceFunctionToolCallOutputResource, + ItemResourceFunctionToolCallResource, + ItemResourceImageGenToolCall, + ItemResourceLocalShellToolCall, + ItemResourceLocalShellToolCallOutput, + ItemResourceMcpApprovalRequest, + ItemResourceMcpApprovalResponseResource, + ItemResourceMcpListTools, + ItemResourceMcpToolCall, + ItemResourceOutputMessage, + ItemResourceWebSearchToolCall, + KeyPressAction, LocalShellExecAction, - LocalShellTool, - LocalShellToolCallItemParam, - LocalShellToolCallItemResource, - LocalShellToolCallOutputItemParam, - LocalShellToolCallOutputItemResource, - Location, + LocalShellToolParam, LogProb, - MCPApprovalRequestItemParam, - MCPApprovalRequestItemResource, - MCPApprovalResponseItemParam, - MCPApprovalResponseItemResource, - MCPCallItemParam, - MCPCallItemResource, - MCPListToolsItemParam, - MCPListToolsItemResource, MCPListToolsTool, + MCPListToolsToolAnnotations, + MCPListToolsToolInputSchema, MCPTool, - MCPToolAllowedTools1, - MCPToolRequireApproval1, - MCPToolRequireApprovalAlways, - MCPToolRequireApprovalNever, + MCPToolFilter, + MCPToolRequireApproval, ManagedAzureAISearchIndex, 
MemoryItem, MemoryOperation, MemorySearchItem, MemorySearchOptions, - MemorySearchTool, - MemorySearchToolCallItemParam, + MemorySearchPreviewTool, MemorySearchToolCallItemResource, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, @@ -202,19 +239,17 @@ MemoryStoreDeleteScopeResult, MemoryStoreDetails, MemoryStoreOperationUsage, - MemoryStoreOperationUsageInputTokensDetails, - MemoryStoreOperationUsageOutputTokensDetails, MemoryStoreSearchResult, MemoryStoreUpdateCompletedResult, MemoryStoreUpdateResult, - MicrosoftFabricAgentTool, + MicrosoftFabricPreviewTool, ModelDeployment, ModelDeploymentSku, MonthlyRecurrenceSchedule, + Move, NoAuthenticationCredentials, OAuthConsentRequestItemResource, OneTimeTrigger, - OpenApiAgentTool, OpenApiAnonymousAuthDetails, OpenApiAuthDetails, OpenApiFunctionDefinition, @@ -223,9 +258,13 @@ OpenApiManagedSecurityScheme, OpenApiProjectConnectionAuthDetails, OpenApiProjectConnectionSecurityScheme, + OpenApiTool, + OutputContent, + OutputMessageContent, + OutputMessageContentOutputTextContent, + OutputMessageContentRefusalContent, PendingUploadRequest, PendingUploadResponse, - Prompt, PromptAgentDefinition, PromptAgentDefinitionText, PromptBasedEvaluatorDefinition, @@ -233,119 +272,51 @@ RaiConfig, RankingOptions, Reasoning, - ReasoningItemParam, - ReasoningItemResource, - ReasoningItemSummaryPart, - ReasoningItemSummaryTextPart, + ReasoningTextContent, RecurrenceSchedule, RecurrenceTrigger, RedTeam, - Response, - ResponseCodeInterpreterCallCodeDeltaEvent, - ResponseCodeInterpreterCallCodeDoneEvent, - ResponseCodeInterpreterCallCompletedEvent, - ResponseCodeInterpreterCallInProgressEvent, - ResponseCodeInterpreterCallInterpretingEvent, - ResponseCompletedEvent, - ResponseContentPartAddedEvent, - ResponseContentPartDoneEvent, - ResponseConversation1, - ResponseCreatedEvent, - ResponseError, - ResponseErrorEvent, - ResponseFailedEvent, - ResponseFileSearchCallCompletedEvent, - ResponseFileSearchCallInProgressEvent, - 
ResponseFileSearchCallSearchingEvent, - ResponseFunctionCallArgumentsDeltaEvent, - ResponseFunctionCallArgumentsDoneEvent, - ResponseImageGenCallCompletedEvent, - ResponseImageGenCallGeneratingEvent, - ResponseImageGenCallInProgressEvent, - ResponseImageGenCallPartialImageEvent, - ResponseInProgressEvent, - ResponseIncompleteDetails1, - ResponseIncompleteEvent, - ResponseMCPCallArgumentsDeltaEvent, - ResponseMCPCallArgumentsDoneEvent, - ResponseMCPCallCompletedEvent, - ResponseMCPCallFailedEvent, - ResponseMCPCallInProgressEvent, - ResponseMCPListToolsCompletedEvent, - ResponseMCPListToolsFailedEvent, - ResponseMCPListToolsInProgressEvent, - ResponseOutputItemAddedEvent, - ResponseOutputItemDoneEvent, - ResponsePromptVariables, - ResponseQueuedEvent, - ResponseReasoningDeltaEvent, - ResponseReasoningDoneEvent, - ResponseReasoningSummaryDeltaEvent, - ResponseReasoningSummaryDoneEvent, - ResponseReasoningSummaryPartAddedEvent, - ResponseReasoningSummaryPartDoneEvent, - ResponseReasoningSummaryTextDeltaEvent, - ResponseReasoningSummaryTextDoneEvent, - ResponseRefusalDeltaEvent, - ResponseRefusalDoneEvent, - ResponseStreamEvent, - ResponseText, - ResponseTextDeltaEvent, - ResponseTextDoneEvent, - ResponseTextFormatConfiguration, - ResponseTextFormatConfigurationJsonObject, - ResponseTextFormatConfigurationJsonSchema, - ResponseTextFormatConfigurationText, - ResponseUsage, - ResponseWebSearchCallCompletedEvent, - ResponseWebSearchCallInProgressEvent, - ResponseWebSearchCallSearchingEvent, - ResponsesAssistantMessageItemParam, - ResponsesAssistantMessageItemResource, - ResponsesDeveloperMessageItemParam, - ResponsesDeveloperMessageItemResource, - ResponsesMessageItemParam, - ResponsesMessageItemResource, - ResponsesSystemMessageItemParam, - ResponsesSystemMessageItemResource, - ResponsesUserMessageItemParam, - ResponsesUserMessageItemResource, + ResponseUsageInputTokensDetails, + ResponseUsageOutputTokensDetails, SASCredentials, Schedule, ScheduleRun, ScheduleTask, - 
SharepointAgentTool, + Screenshot, + Scroll, SharepointGroundingToolParameters, + SharepointPreviewTool, StructuredInputDefinition, StructuredOutputDefinition, StructuredOutputsItemResource, + Summary, Target, TargetConfig, TaxonomyCategory, TaxonomySubCategory, + TextResponseFormatConfiguration, + TextResponseFormatConfigurationResponseFormatJsonObject, + TextResponseFormatConfigurationResponseFormatText, + TextResponseFormatJsonSchema, Tool, - ToolChoiceObject, - ToolChoiceObjectCodeInterpreter, - ToolChoiceObjectComputer, - ToolChoiceObjectFileSearch, - ToolChoiceObjectFunction, - ToolChoiceObjectImageGen, - ToolChoiceObjectMCP, - ToolChoiceObjectWebSearch, ToolDescription, ToolProjectConnection, TopLogProb, Trigger, + Type, + UrlCitationBody, UserProfileMemoryItem, VectorStoreFileAttributes, - WebSearchAction, + Wait, WebSearchActionFind, WebSearchActionOpenPage, WebSearchActionSearch, WebSearchActionSearchSources, + WebSearchApproximateLocation, + WebSearchConfiguration, WebSearchPreviewTool, - WebSearchToolCallItemParam, - WebSearchToolCallItemResource, + WebSearchTool, + WebSearchToolFilters, WeeklyRecurrenceSchedule, WorkflowActionOutputItemResource, WorkflowAgentDefinition, @@ -355,16 +326,26 @@ AgentKind, AgentProtocol, AnnotationType, + ApplyPatchCallOutputStatus, + ApplyPatchCallOutputStatusParam, + ApplyPatchCallStatus, + ApplyPatchCallStatusParam, + ApplyPatchFileOperationType, + ApplyPatchOperationParamType, AttackStrategy, AzureAISearchQueryType, - CodeInterpreterOutputType, + ClickButtonType, ComputerActionType, - ComputerToolCallOutputItemOutputType, + ComputerEnvironment, ConnectionType, + ContainerLogKind, + ContainerMemoryLimit, CredentialType, + CustomToolParamFormatType, DatasetType, DayOfWeek, DeploymentType, + DetailEnum, EvaluationRuleActionType, EvaluationRuleEventType, EvaluationTaxonomyInputType, @@ -373,43 +354,49 @@ EvaluatorMetricDirection, EvaluatorMetricType, EvaluatorType, + FunctionAndCustomToolCallOutputType, + 
FunctionCallItemStatus, + FunctionShellCallItemStatus, + FunctionShellCallOutputOutcomeParamType, + FunctionShellCallOutputOutcomeType, + GrammarSyntax1, + ImageDetail, IndexType, + InputContentType, + InputFidelity, + InputItemType, InsightType, - ItemContentType, - ItemType, - LocationType, + ItemResourceType, + LocalShellCallStatus, + MCPToolCallStatus, MemoryItemKind, MemoryOperationKind, MemoryStoreKind, MemoryStoreUpdateStatus, OpenApiAuthType, OperationState, + OutputContentType, + OutputMessageContentType, + PageOrder, PendingUploadType, - ReasoningEffort, - ReasoningItemSummaryPartType, + RankerVersionType, RecurrenceType, - ResponseErrorCode, - ResponseStreamEventType, - ResponseTextFormatConfigurationType, - ResponsesMessageRole, RiskCategory, SampleType, ScheduleProvisioningStatus, ScheduleTaskType, - ServiceTier, - ToolChoiceObjectType, - ToolChoiceOptions, + SearchContextSize, + TextResponseFormatConfigurationType, ToolType, TreatmentEffectType, TriggerType, - WebSearchActionType, ) from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "A2ATool", + "A2APreviewTool", "AISearchIndexResource", "AgentClusterInsightResult", "AgentClusterInsightsRequest", @@ -422,75 +409,69 @@ "AgentVersionDetails", "AgenticIdentityCredentials", "Annotation", - "AnnotationFileCitation", - "AnnotationFilePath", - "AnnotationUrlCitation", "ApiErrorResponse", "ApiKeyCredentials", + "ApplyPatchCreateFileOperation", + "ApplyPatchCreateFileOperationParam", + "ApplyPatchDeleteFileOperation", + "ApplyPatchDeleteFileOperationParam", + "ApplyPatchFileOperation", + "ApplyPatchOperationParam", + "ApplyPatchToolParam", + "ApplyPatchUpdateFileOperation", + "ApplyPatchUpdateFileOperationParam", "ApproximateLocation", "AzureAIAgentTarget", - "AzureAISearchAgentTool", "AzureAISearchIndex", + "AzureAISearchTool", "AzureAISearchToolResource", - "AzureFunctionAgentTool", "AzureFunctionBinding", "AzureFunctionDefinition", 
"AzureFunctionDefinitionFunction", "AzureFunctionStorageQueue", + "AzureFunctionTool", "AzureOpenAIModelConfiguration", "BaseCredentials", - "BingCustomSearchAgentTool", "BingCustomSearchConfiguration", + "BingCustomSearchPreviewTool", "BingCustomSearchToolParameters", - "BingGroundingAgentTool", "BingGroundingSearchConfiguration", "BingGroundingSearchToolParameters", + "BingGroundingTool", "BlobReference", "BlobReferenceSasCredential", - "BrowserAutomationAgentTool", + "BrowserAutomationPreviewTool", "BrowserAutomationToolConnectionParameters", "BrowserAutomationToolParameters", "CaptureStructuredOutputsTool", "ChartCoordinate", "ChatSummaryMemoryItem", + "ClickParam", "ClusterInsightResult", "ClusterTokenUsage", "CodeBasedEvaluatorDefinition", - "CodeInterpreterOutput", + "CodeInterpreterContainerAuto", "CodeInterpreterOutputImage", "CodeInterpreterOutputLogs", "CodeInterpreterTool", - "CodeInterpreterToolAuto", - "CodeInterpreterToolCallItemParam", - "CodeInterpreterToolCallItemResource", "ComparisonFilter", "CompoundFilter", "ComputerAction", - "ComputerActionClick", - "ComputerActionDoubleClick", - "ComputerActionDrag", - "ComputerActionKeyPress", - "ComputerActionMove", - "ComputerActionScreenshot", - "ComputerActionScroll", - "ComputerActionTypeKeys", - "ComputerActionWait", - "ComputerToolCallItemParam", - "ComputerToolCallItemResource", - "ComputerToolCallOutputItemOutput", - "ComputerToolCallOutputItemOutputComputerScreenshot", - "ComputerToolCallOutputItemParam", - "ComputerToolCallOutputItemResource", - "ComputerToolCallSafetyCheck", + "ComputerCallSafetyCheckParam", + "ComputerScreenshotImage", "ComputerUsePreviewTool", "Connection", "ContainerAppAgentDefinition", + "ContainerFileCitationBody", "ContinuousEvaluationRuleAction", - "Coordinate", "CosmosDBIndex", "CreatedBy", "CronTrigger", "CustomCredential", + "CustomGrammarFormatParam", + "CustomTextFormatParam", + "CustomToolParam", + "CustomToolParamFormat", "DailyRecurrenceSchedule", 
"DatasetCredential", "DatasetVersion", @@ -498,6 +479,10 @@ "DeleteAgentVersionResponse", "DeleteMemoryStoreResult", "Deployment", + "DoubleClickAction", + "Drag", + "DragPoint", + "EasyInputMessage", "EmbeddingConfiguration", "EntraIDCredentials", "Error", @@ -521,26 +506,68 @@ "EvaluatorVersion", "FabricDataAgentToolParameters", "FieldMapping", + "FileCitationBody", "FileDatasetVersion", + "FilePath", "FileSearchTool", - "FileSearchToolCallItemParam", - "FileSearchToolCallItemParamResult", - "FileSearchToolCallItemResource", + "FileSearchToolCallResults", "FolderDatasetVersion", + "FunctionAndCustomToolCallOutput", + "FunctionAndCustomToolCallOutputInputFileContent", + "FunctionAndCustomToolCallOutputInputImageContent", + "FunctionAndCustomToolCallOutputInputTextContent", + "FunctionShellAction", + "FunctionShellActionParam", + "FunctionShellCallOutputContent", + "FunctionShellCallOutputContentParam", + "FunctionShellCallOutputExitOutcome", + "FunctionShellCallOutputExitOutcomeParam", + "FunctionShellCallOutputOutcome", + "FunctionShellCallOutputOutcomeParam", + "FunctionShellCallOutputTimeoutOutcome", + "FunctionShellCallOutputTimeoutOutcomeParam", + "FunctionShellToolParam", "FunctionTool", - "FunctionToolCallItemParam", - "FunctionToolCallItemResource", - "FunctionToolCallOutputItemParam", - "FunctionToolCallOutputItemResource", "HostedAgentDefinition", "HourlyRecurrenceSchedule", "HumanEvaluationRuleAction", + "HybridSearchOptions", "ImageBasedHostedAgentDefinition", "ImageGenTool", - "ImageGenToolCallItemParam", - "ImageGenToolCallItemResource", "ImageGenToolInputImageMask", "Index", + "InputContent", + "InputContentInputFileContent", + "InputContentInputImageContent", + "InputContentInputTextContent", + "InputFileContentParam", + "InputImageContentParamAutoParam", + "InputItem", + "InputItemApplyPatchToolCallItemParam", + "InputItemApplyPatchToolCallOutputItemParam", + "InputItemCodeInterpreterToolCall", + "InputItemCompactionSummaryItemParam", + 
"InputItemComputerCallOutputItemParam", + "InputItemComputerToolCall", + "InputItemCustomToolCall", + "InputItemCustomToolCallOutput", + "InputItemFileSearchToolCall", + "InputItemFunctionCallOutputItemParam", + "InputItemFunctionShellCallItemParam", + "InputItemFunctionShellCallOutputItemParam", + "InputItemFunctionToolCall", + "InputItemImageGenToolCall", + "InputItemLocalShellToolCall", + "InputItemLocalShellToolCallOutput", + "InputItemMcpApprovalRequest", + "InputItemMcpApprovalResponse", + "InputItemMcpListTools", + "InputItemMcpToolCall", + "InputItemOutputMessage", + "InputItemReasoningItem", + "InputItemWebSearchToolCall", + "InputMessageResource", + "InputTextContentParam", "Insight", "InsightCluster", "InsightModelConfiguration", @@ -550,46 +577,43 @@ "InsightScheduleTask", "InsightSummary", "InsightsMetadata", - "ItemContent", - "ItemContentInputAudio", - "ItemContentInputFile", - "ItemContentInputImage", - "ItemContentInputText", - "ItemContentOutputAudio", - "ItemContentOutputText", - "ItemContentRefusal", - "ItemParam", - "ItemReferenceItemParam", + "ItemReferenceParam", "ItemResource", + "ItemResourceApplyPatchToolCall", + "ItemResourceApplyPatchToolCallOutput", + "ItemResourceCodeInterpreterToolCall", + "ItemResourceComputerToolCall", + "ItemResourceComputerToolCallOutputResource", + "ItemResourceFileSearchToolCall", + "ItemResourceFunctionShellCall", + "ItemResourceFunctionShellCallOutput", + "ItemResourceFunctionToolCallOutputResource", + "ItemResourceFunctionToolCallResource", + "ItemResourceImageGenToolCall", + "ItemResourceLocalShellToolCall", + "ItemResourceLocalShellToolCallOutput", + "ItemResourceMcpApprovalRequest", + "ItemResourceMcpApprovalResponseResource", + "ItemResourceMcpListTools", + "ItemResourceMcpToolCall", + "ItemResourceOutputMessage", + "ItemResourceWebSearchToolCall", + "KeyPressAction", "LocalShellExecAction", - "LocalShellTool", - "LocalShellToolCallItemParam", - "LocalShellToolCallItemResource", - 
"LocalShellToolCallOutputItemParam", - "LocalShellToolCallOutputItemResource", - "Location", + "LocalShellToolParam", "LogProb", - "MCPApprovalRequestItemParam", - "MCPApprovalRequestItemResource", - "MCPApprovalResponseItemParam", - "MCPApprovalResponseItemResource", - "MCPCallItemParam", - "MCPCallItemResource", - "MCPListToolsItemParam", - "MCPListToolsItemResource", "MCPListToolsTool", + "MCPListToolsToolAnnotations", + "MCPListToolsToolInputSchema", "MCPTool", - "MCPToolAllowedTools1", - "MCPToolRequireApproval1", - "MCPToolRequireApprovalAlways", - "MCPToolRequireApprovalNever", + "MCPToolFilter", + "MCPToolRequireApproval", "ManagedAzureAISearchIndex", "MemoryItem", "MemoryOperation", "MemorySearchItem", "MemorySearchOptions", - "MemorySearchTool", - "MemorySearchToolCallItemParam", + "MemorySearchPreviewTool", "MemorySearchToolCallItemResource", "MemoryStoreDefaultDefinition", "MemoryStoreDefaultOptions", @@ -597,19 +621,17 @@ "MemoryStoreDeleteScopeResult", "MemoryStoreDetails", "MemoryStoreOperationUsage", - "MemoryStoreOperationUsageInputTokensDetails", - "MemoryStoreOperationUsageOutputTokensDetails", "MemoryStoreSearchResult", "MemoryStoreUpdateCompletedResult", "MemoryStoreUpdateResult", - "MicrosoftFabricAgentTool", + "MicrosoftFabricPreviewTool", "ModelDeployment", "ModelDeploymentSku", "MonthlyRecurrenceSchedule", + "Move", "NoAuthenticationCredentials", "OAuthConsentRequestItemResource", "OneTimeTrigger", - "OpenApiAgentTool", "OpenApiAnonymousAuthDetails", "OpenApiAuthDetails", "OpenApiFunctionDefinition", @@ -618,9 +640,13 @@ "OpenApiManagedSecurityScheme", "OpenApiProjectConnectionAuthDetails", "OpenApiProjectConnectionSecurityScheme", + "OpenApiTool", + "OutputContent", + "OutputMessageContent", + "OutputMessageContentOutputTextContent", + "OutputMessageContentRefusalContent", "PendingUploadRequest", "PendingUploadResponse", - "Prompt", "PromptAgentDefinition", "PromptAgentDefinitionText", "PromptBasedEvaluatorDefinition", @@ -628,135 +654,77 
@@ "RaiConfig", "RankingOptions", "Reasoning", - "ReasoningItemParam", - "ReasoningItemResource", - "ReasoningItemSummaryPart", - "ReasoningItemSummaryTextPart", + "ReasoningTextContent", "RecurrenceSchedule", "RecurrenceTrigger", "RedTeam", - "Response", - "ResponseCodeInterpreterCallCodeDeltaEvent", - "ResponseCodeInterpreterCallCodeDoneEvent", - "ResponseCodeInterpreterCallCompletedEvent", - "ResponseCodeInterpreterCallInProgressEvent", - "ResponseCodeInterpreterCallInterpretingEvent", - "ResponseCompletedEvent", - "ResponseContentPartAddedEvent", - "ResponseContentPartDoneEvent", - "ResponseConversation1", - "ResponseCreatedEvent", - "ResponseError", - "ResponseErrorEvent", - "ResponseFailedEvent", - "ResponseFileSearchCallCompletedEvent", - "ResponseFileSearchCallInProgressEvent", - "ResponseFileSearchCallSearchingEvent", - "ResponseFunctionCallArgumentsDeltaEvent", - "ResponseFunctionCallArgumentsDoneEvent", - "ResponseImageGenCallCompletedEvent", - "ResponseImageGenCallGeneratingEvent", - "ResponseImageGenCallInProgressEvent", - "ResponseImageGenCallPartialImageEvent", - "ResponseInProgressEvent", - "ResponseIncompleteDetails1", - "ResponseIncompleteEvent", - "ResponseMCPCallArgumentsDeltaEvent", - "ResponseMCPCallArgumentsDoneEvent", - "ResponseMCPCallCompletedEvent", - "ResponseMCPCallFailedEvent", - "ResponseMCPCallInProgressEvent", - "ResponseMCPListToolsCompletedEvent", - "ResponseMCPListToolsFailedEvent", - "ResponseMCPListToolsInProgressEvent", - "ResponseOutputItemAddedEvent", - "ResponseOutputItemDoneEvent", - "ResponsePromptVariables", - "ResponseQueuedEvent", - "ResponseReasoningDeltaEvent", - "ResponseReasoningDoneEvent", - "ResponseReasoningSummaryDeltaEvent", - "ResponseReasoningSummaryDoneEvent", - "ResponseReasoningSummaryPartAddedEvent", - "ResponseReasoningSummaryPartDoneEvent", - "ResponseReasoningSummaryTextDeltaEvent", - "ResponseReasoningSummaryTextDoneEvent", - "ResponseRefusalDeltaEvent", - "ResponseRefusalDoneEvent", - 
"ResponseStreamEvent", - "ResponseText", - "ResponseTextDeltaEvent", - "ResponseTextDoneEvent", - "ResponseTextFormatConfiguration", - "ResponseTextFormatConfigurationJsonObject", - "ResponseTextFormatConfigurationJsonSchema", - "ResponseTextFormatConfigurationText", - "ResponseUsage", - "ResponseWebSearchCallCompletedEvent", - "ResponseWebSearchCallInProgressEvent", - "ResponseWebSearchCallSearchingEvent", - "ResponsesAssistantMessageItemParam", - "ResponsesAssistantMessageItemResource", - "ResponsesDeveloperMessageItemParam", - "ResponsesDeveloperMessageItemResource", - "ResponsesMessageItemParam", - "ResponsesMessageItemResource", - "ResponsesSystemMessageItemParam", - "ResponsesSystemMessageItemResource", - "ResponsesUserMessageItemParam", - "ResponsesUserMessageItemResource", + "ResponseUsageInputTokensDetails", + "ResponseUsageOutputTokensDetails", "SASCredentials", "Schedule", "ScheduleRun", "ScheduleTask", - "SharepointAgentTool", + "Screenshot", + "Scroll", "SharepointGroundingToolParameters", + "SharepointPreviewTool", "StructuredInputDefinition", "StructuredOutputDefinition", "StructuredOutputsItemResource", + "Summary", "Target", "TargetConfig", "TaxonomyCategory", "TaxonomySubCategory", + "TextResponseFormatConfiguration", + "TextResponseFormatConfigurationResponseFormatJsonObject", + "TextResponseFormatConfigurationResponseFormatText", + "TextResponseFormatJsonSchema", "Tool", - "ToolChoiceObject", - "ToolChoiceObjectCodeInterpreter", - "ToolChoiceObjectComputer", - "ToolChoiceObjectFileSearch", - "ToolChoiceObjectFunction", - "ToolChoiceObjectImageGen", - "ToolChoiceObjectMCP", - "ToolChoiceObjectWebSearch", "ToolDescription", "ToolProjectConnection", "TopLogProb", "Trigger", + "Type", + "UrlCitationBody", "UserProfileMemoryItem", "VectorStoreFileAttributes", - "WebSearchAction", + "Wait", "WebSearchActionFind", "WebSearchActionOpenPage", "WebSearchActionSearch", "WebSearchActionSearchSources", + "WebSearchApproximateLocation", + 
"WebSearchConfiguration", "WebSearchPreviewTool", - "WebSearchToolCallItemParam", - "WebSearchToolCallItemResource", + "WebSearchTool", + "WebSearchToolFilters", "WeeklyRecurrenceSchedule", "WorkflowActionOutputItemResource", "WorkflowAgentDefinition", "AgentKind", "AgentProtocol", "AnnotationType", + "ApplyPatchCallOutputStatus", + "ApplyPatchCallOutputStatusParam", + "ApplyPatchCallStatus", + "ApplyPatchCallStatusParam", + "ApplyPatchFileOperationType", + "ApplyPatchOperationParamType", "AttackStrategy", "AzureAISearchQueryType", - "CodeInterpreterOutputType", + "ClickButtonType", "ComputerActionType", - "ComputerToolCallOutputItemOutputType", + "ComputerEnvironment", "ConnectionType", + "ContainerLogKind", + "ContainerMemoryLimit", "CredentialType", + "CustomToolParamFormatType", "DatasetType", "DayOfWeek", "DeploymentType", + "DetailEnum", "EvaluationRuleActionType", "EvaluationRuleEventType", "EvaluationTaxonomyInputType", @@ -765,36 +733,42 @@ "EvaluatorMetricDirection", "EvaluatorMetricType", "EvaluatorType", + "FunctionAndCustomToolCallOutputType", + "FunctionCallItemStatus", + "FunctionShellCallItemStatus", + "FunctionShellCallOutputOutcomeParamType", + "FunctionShellCallOutputOutcomeType", + "GrammarSyntax1", + "ImageDetail", "IndexType", + "InputContentType", + "InputFidelity", + "InputItemType", "InsightType", - "ItemContentType", - "ItemType", - "LocationType", + "ItemResourceType", + "LocalShellCallStatus", + "MCPToolCallStatus", "MemoryItemKind", "MemoryOperationKind", "MemoryStoreKind", "MemoryStoreUpdateStatus", "OpenApiAuthType", "OperationState", + "OutputContentType", + "OutputMessageContentType", + "PageOrder", "PendingUploadType", - "ReasoningEffort", - "ReasoningItemSummaryPartType", + "RankerVersionType", "RecurrenceType", - "ResponseErrorCode", - "ResponseStreamEventType", - "ResponseTextFormatConfigurationType", - "ResponsesMessageRole", "RiskCategory", "SampleType", "ScheduleProvisioningStatus", "ScheduleTaskType", - "ServiceTier", - 
"ToolChoiceObjectType", - "ToolChoiceOptions", + "SearchContextSize", + "TextResponseFormatConfigurationType", "ToolType", "TreatmentEffectType", "TriggerType", - "WebSearchActionType", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 62d671d2c619..145ae30df168 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -31,8 +31,52 @@ class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): FILE_CITATION = "file_citation" URL_CITATION = "url_citation" - FILE_PATH = "file_path" CONTAINER_FILE_CITATION = "container_file_citation" + FILE_PATH = "file_path" + + +class ApplyPatchCallOutputStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ApplyPatchCallOutputStatus.""" + + COMPLETED = "completed" + FAILED = "failed" + + +class ApplyPatchCallOutputStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Apply patch call output status.""" + + COMPLETED = "completed" + FAILED = "failed" + + +class ApplyPatchCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ApplyPatchCallStatus.""" + + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + + +class ApplyPatchCallStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Apply patch call status.""" + + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + + +class ApplyPatchFileOperationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ApplyPatchFileOperationType.""" + + CREATE_FILE = "create_file" + DELETE_FILE = "delete_file" + UPDATE_FILE = "update_file" + + +class ApplyPatchOperationParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ApplyPatchOperationParamType.""" + + CREATE_FILE = "create_file" + DELETE_FILE = "delete_file" + UPDATE_FILE = "update_file" class 
AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -68,7 +112,7 @@ class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): JAILBREAK = "jailbreak" """Injects specially crafted prompts to bypass AI safeguards, known as User Injected Prompt Attacks (UPIA).""" - ANSII_ATTACK = "ansii_attack" + ANSI_ATTACK = "ansi_attack" """Utilizes ANSI escape sequences to manipulate text appearance and behavior.""" CHARACTER_SWAP = "character_swap" """Swaps characters within text to create variations or obfuscate the original content.""" @@ -121,31 +165,38 @@ class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Query type ``vector_semantic_hybrid``""" -class CodeInterpreterOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of CodeInterpreterOutputType.""" +class ClickButtonType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ClickButtonType.""" - LOGS = "logs" - IMAGE = "image" + LEFT = "left" + RIGHT = "right" + WHEEL = "wheel" + BACK = "back" + FORWARD = "forward" class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of ComputerActionType.""" - SCREENSHOT = "screenshot" CLICK = "click" DOUBLE_CLICK = "double_click" + DRAG = "drag" + KEYPRESS = "keypress" + MOVE = "move" + SCREENSHOT = "screenshot" SCROLL = "scroll" TYPE = "type" WAIT = "wait" - KEYPRESS = "keypress" - DRAG = "drag" - MOVE = "move" -class ComputerToolCallOutputItemOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """A computer screenshot image used with the computer use tool.""" +class ComputerEnvironment(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ComputerEnvironment.""" - SCREENSHOT = "computer_screenshot" + WINDOWS = "windows" + MAC = "mac" + LINUX = "linux" + UBUNTU = "ubuntu" + BROWSER = "browser" class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -173,6 +224,24 @@ class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Remote tool""" +class 
ContainerLogKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of logs to stream from a container.""" + + CONSOLE = "console" + """Console logs from the container.""" + SYSTEM = "system" + """System logs from the container.""" + + +class ContainerMemoryLimit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ContainerMemoryLimit.""" + + ENUM_1_G = "1g" + ENUM_4_G = "4g" + ENUM_16_G = "16g" + ENUM_64_G = "64g" + + class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The credential type used by the connection.""" @@ -190,6 +259,13 @@ class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Agentic identity credential""" +class CustomToolParamFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of CustomToolParamFormatType.""" + + TEXT = "text" + GRAMMAR = "grammar" + + class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum to determine the type of data.""" @@ -225,6 +301,14 @@ class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Model deployment""" +class DetailEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of DetailEnum.""" + + LOW = "low" + HIGH = "high" + AUTO = "auto" + + class EvaluationRuleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of the evaluation action.""" @@ -311,6 +395,59 @@ class EvaluatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Custom evaluator""" +class FunctionAndCustomToolCallOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of FunctionAndCustomToolCallOutputType.""" + + INPUT_TEXT = "input_text" + INPUT_IMAGE = "input_image" + INPUT_FILE = "input_file" + + +class FunctionCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of FunctionCallItemStatus.""" + + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + INCOMPLETE = "incomplete" + + +class FunctionShellCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Shell call status.""" + + 
IN_PROGRESS = "in_progress" + COMPLETED = "completed" + INCOMPLETE = "incomplete" + + +class FunctionShellCallOutputOutcomeParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of FunctionShellCallOutputOutcomeParamType.""" + + TIMEOUT = "timeout" + EXIT = "exit" + + +class FunctionShellCallOutputOutcomeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of FunctionShellCallOutputOutcomeType.""" + + TIMEOUT = "timeout" + EXIT = "exit" + + +class GrammarSyntax1(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of GrammarSyntax1.""" + + LARK = "lark" + REGEX = "regex" + + +class ImageDetail(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ImageDetail.""" + + LOW = "low" + HIGH = "high" + AUTO = "auto" + + class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of IndexType.""" @@ -322,6 +459,54 @@ class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Managed Azure Search""" +class InputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of InputContentType.""" + + INPUT_TEXT = "input_text" + INPUT_IMAGE = "input_image" + INPUT_FILE = "input_file" + + +class InputFidelity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Control how much effort the model will exert to match the style and features, especially facial + features, of input images. This parameter is only supported for ``gpt-image-1``. Unsupported + for ``gpt-image-1-mini``. Supports ``high`` and ``low``. Defaults to ``low``. 
+ """ + + HIGH = "high" + LOW = "low" + + +class InputItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of InputItemType.""" + + MESSAGE = "message" + OUTPUT_MESSAGE = "output_message" + FILE_SEARCH_CALL = "file_search_call" + COMPUTER_CALL = "computer_call" + COMPUTER_CALL_OUTPUT = "computer_call_output" + WEB_SEARCH_CALL = "web_search_call" + FUNCTION_CALL = "function_call" + FUNCTION_CALL_OUTPUT = "function_call_output" + REASONING = "reasoning" + COMPACTION = "compaction" + IMAGE_GENERATION_CALL = "image_generation_call" + CODE_INTERPRETER_CALL = "code_interpreter_call" + LOCAL_SHELL_CALL = "local_shell_call" + LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" + SHELL_CALL = "shell_call" + SHELL_CALL_OUTPUT = "shell_call_output" + APPLY_PATCH_CALL = "apply_patch_call" + APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output" + MCP_LIST_TOOLS = "mcp_list_tools" + MCP_APPROVAL_REQUEST = "mcp_approval_request" + MCP_APPROVAL_RESPONSE = "mcp_approval_response" + MCP_CALL = "mcp_call" + CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output" + CUSTOM_TOOL_CALL = "custom_tool_call" + ITEM_REFERENCE = "item_reference" + + class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The request of the insights.""" @@ -333,34 +518,25 @@ class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Evaluation Comparison.""" -class ItemContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Multi-modal input and output contents.""" - - INPUT_TEXT = "input_text" - INPUT_AUDIO = "input_audio" - INPUT_IMAGE = "input_image" - INPUT_FILE = "input_file" - OUTPUT_TEXT = "output_text" - OUTPUT_AUDIO = "output_audio" - REFUSAL = "refusal" - - -class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ItemType.""" +class ItemResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ItemResourceType.""" MESSAGE = "message" + OUTPUT_MESSAGE = "output_message" FILE_SEARCH_CALL = "file_search_call" - FUNCTION_CALL = 
"function_call" - FUNCTION_CALL_OUTPUT = "function_call_output" COMPUTER_CALL = "computer_call" COMPUTER_CALL_OUTPUT = "computer_call_output" WEB_SEARCH_CALL = "web_search_call" - REASONING = "reasoning" - ITEM_REFERENCE = "item_reference" + FUNCTION_CALL = "function_call" + FUNCTION_CALL_OUTPUT = "function_call_output" IMAGE_GENERATION_CALL = "image_generation_call" CODE_INTERPRETER_CALL = "code_interpreter_call" LOCAL_SHELL_CALL = "local_shell_call" LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" + SHELL_CALL = "shell_call" + SHELL_CALL_OUTPUT = "shell_call_output" + APPLY_PATCH_CALL = "apply_patch_call" + APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output" MCP_LIST_TOOLS = "mcp_list_tools" MCP_APPROVAL_REQUEST = "mcp_approval_request" MCP_APPROVAL_RESPONSE = "mcp_approval_response" @@ -371,10 +547,22 @@ class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OAUTH_CONSENT_REQUEST = "oauth_consent_request" -class LocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of LocationType.""" +class LocalShellCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of LocalShellCallStatus.""" + + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + INCOMPLETE = "incomplete" - APPROXIMATE = "approximate" + +class MCPToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of MCPToolCallStatus.""" + + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + INCOMPLETE = "incomplete" + CALLING = "calling" + FAILED = "failed" class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -442,43 +630,42 @@ class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The operation has been canceled by the user.""" -class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of pending upload.""" +class OutputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of OutputContentType.""" - NONE = "None" - """No pending upload.""" - BLOB_REFERENCE = "BlobReference" - 
"""Blob Reference is the only supported type.""" + OUTPUT_TEXT = "output_text" + REFUSAL = "refusal" + REASONING_TEXT = "reasoning_text" -class ReasoningEffort(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Constrains effort on reasoning for reasoning models. +class OutputMessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of OutputMessageContentType.""" - Currently supported values are none, minimal, low, medium, and high. + OUTPUT_TEXT = "output_text" + REFUSAL = "refusal" - Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in - a response. - gpt-5.1 defaults to none, which does not perform reasoning. The supported reasoning values for - gpt-5.1 are none, low, medium, and high. Tool calls are supported for all reasoning values in - gpt-5.1. +class PageOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of PageOrder.""" - All models before gpt-5.1 default to medium reasoning effort, and do not support none. + ASC = "asc" + DESC = "desc" - The gpt-5-pro model defaults to (and only supports) high reasoning effort. 
- """ - NONE = "none" - MINIMAL = "minimal" - LOW = "low" - MEDIUM = "medium" - HIGH = "high" +class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of pending upload.""" + + NONE = "None" + """No pending upload.""" + BLOB_REFERENCE = "BlobReference" + """Blob Reference is the only supported type.""" -class ReasoningItemSummaryPartType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ReasoningItemSummaryPartType.""" +class RankerVersionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of RankerVersionType.""" - SUMMARY_TEXT = "summary_text" + AUTO = "auto" + DEFAULT2024_11_15 = "default-2024-11-15" class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -494,117 +681,6 @@ class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Monthly recurrence pattern.""" -class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The error code for the response.""" - - SERVER_ERROR = "server_error" - RATE_LIMIT_EXCEEDED = "rate_limit_exceeded" - INVALID_PROMPT = "invalid_prompt" - VECTOR_STORE_TIMEOUT = "vector_store_timeout" - INVALID_IMAGE = "invalid_image" - INVALID_IMAGE_FORMAT = "invalid_image_format" - INVALID_BASE64_IMAGE = "invalid_base64_image" - INVALID_IMAGE_URL = "invalid_image_url" - IMAGE_TOO_LARGE = "image_too_large" - IMAGE_TOO_SMALL = "image_too_small" - IMAGE_PARSE_ERROR = "image_parse_error" - IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation" - INVALID_IMAGE_MODE = "invalid_image_mode" - IMAGE_FILE_TOO_LARGE = "image_file_too_large" - UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type" - EMPTY_IMAGE_FILE = "empty_image_file" - FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image" - IMAGE_FILE_NOT_FOUND = "image_file_not_found" - - -class ResponsesMessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The collection of valid roles for responses message items.""" - - SYSTEM = "system" - DEVELOPER = "developer" - USER = "user" - 
ASSISTANT = "assistant" - - -class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of ResponseStreamEventType.""" - - RESPONSE_AUDIO_DELTA = "response.audio.delta" - RESPONSE_AUDIO_DONE = "response.audio.done" - RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio_transcript.delta" - RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" - RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta" - RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done" - RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed" - RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress" - RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting" - RESPONSE_COMPLETED = "response.completed" - RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" - RESPONSE_CONTENT_PART_DONE = "response.content_part.done" - RESPONSE_CREATED = "response.created" - ERROR = "error" - RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed" - RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress" - RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" - RESPONSE_IN_PROGRESS = "response.in_progress" - RESPONSE_FAILED = "response.failed" - RESPONSE_INCOMPLETE = "response.incomplete" - RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" - RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" - RESPONSE_REFUSAL_DELTA = "response.refusal.delta" - RESPONSE_REFUSAL_DONE = "response.refusal.done" - RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = "response.output_text.annotation.added" - RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta" - RESPONSE_OUTPUT_TEXT_DONE = 
"response.output_text.done" - RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added" - RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done" - RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta" - RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done" - RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed" - RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress" - RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching" - RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed" - RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating" - RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress" - RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image" - RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call.arguments_delta" - RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call.arguments_done" - RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed" - RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed" - RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress" - RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed" - RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed" - RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress" - RESPONSE_QUEUED = "response.queued" - RESPONSE_REASONING_DELTA = "response.reasoning.delta" - RESPONSE_REASONING_DONE = "response.reasoning.done" - RESPONSE_REASONING_SUMMARY_DELTA = "response.reasoning_summary.delta" - RESPONSE_REASONING_SUMMARY_DONE = "response.reasoning_summary.done" - - -class ResponseTextFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """An object specifying the format that the model must output. 
- - Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, - which ensures the model will match your supplied JSON schema. Learn more in the - `Structured Outputs guide `_. - - The default format is ``{ "type": "text" }`` with no additional options. - - **Not recommended for gpt-4o and newer models:** - - Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which - ensures the message the model generates is valid JSON. Using ``json_schema`` - is preferred for models that support it. - """ - - TEXT = "text" - JSON_SCHEMA = "json_schema" - JSON_OBJECT = "json_object" - - class RiskCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Risk category for the attack objective.""" @@ -661,83 +737,47 @@ class ScheduleTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Insight task.""" -class ServiceTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Specifies the processing type used for serving the request. - - * If set to 'auto', then the request will be processed with the service tier - configured in the Project settings. Unless otherwise configured, the Project will use - 'default'. - * If set to 'default', then the request will be processed with the standard - pricing and performance for the selected model. - * If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' - or 'priority', then the request will be processed with the corresponding service - tier. [Contact sales](https://openai.com/contact-sales) to learn more about Priority - processing. - * When not set, the default behavior is 'auto'. - - When the ``service_tier`` parameter is set, the response body will include the ``service_tier`` - value based on the processing mode actually used to serve the request. This response value - may be different from the value set in the parameter. 
- """ - - AUTO = "auto" - DEFAULT = "default" - FLEX = "flex" - SCALE = "scale" - PRIORITY = "priority" - - -class ToolChoiceObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Indicates that the model should use a built-in tool to generate a response. - `Learn more about built-in tools `_. - """ - - FILE_SEARCH = "file_search" - FUNCTION = "function" - COMPUTER = "computer_use_preview" - WEB_SEARCH = "web_search_preview" - IMAGE_GENERATION = "image_generation" - CODE_INTERPRETER = "code_interpreter" - MCP = "mcp" - - -class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Controls which (if any) tool is called by the model. +class SearchContextSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of SearchContextSize.""" - ``none`` means the model will not call any tool and instead generates a message. + LOW = "low" + MEDIUM = "medium" + HIGH = "high" - ``auto`` means the model can pick between generating a message or calling one or - more tools. - ``required`` means the model must call one or more tools. 
- """ +class TextResponseFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of TextResponseFormatConfigurationType.""" - NONE = "none" - AUTO = "auto" - REQUIRED = "required" + TEXT = "text" + JSON_SCHEMA = "json_schema" + JSON_OBJECT = "json_object" class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """A tool that can be used to generate a response.""" + """Type of ToolType.""" - FILE_SEARCH = "file_search" FUNCTION = "function" + FILE_SEARCH = "file_search" COMPUTER_USE_PREVIEW = "computer_use_preview" - WEB_SEARCH_PREVIEW = "web_search_preview" + WEB_SEARCH = "web_search" MCP = "mcp" CODE_INTERPRETER = "code_interpreter" IMAGE_GENERATION = "image_generation" LOCAL_SHELL = "local_shell" - BING_GROUNDING = "bing_grounding" + SHELL = "shell" + CUSTOM = "custom" + WEB_SEARCH_PREVIEW = "web_search_preview" + APPLY_PATCH = "apply_patch" + A2A_PREVIEW = "a2a_preview" + BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview" FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview" SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview" AZURE_AI_SEARCH = "azure_ai_search" - OPENAPI = "openapi" - BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" - CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" - A2A_PREVIEW = "a2a_preview" AZURE_FUNCTION = "azure_function" + BING_GROUNDING = "bing_grounding" + CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" + OPENAPI = "openapi" MEMORY_SEARCH = "memory_search" @@ -765,11 +805,3 @@ class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Recurrence based trigger.""" ONE_TIME = "OneTime" """One-time trigger.""" - - -class WebSearchActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Type of WebSearchActionType.""" - - SEARCH = "search" - OPEN_PAGE = "open_page" - FIND = "find" diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py 
b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 14f27ed091ef..148f0517a776 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -15,35 +15,36 @@ from ._enums import ( AgentKind, AnnotationType, - CodeInterpreterOutputType, + ApplyPatchFileOperationType, + ApplyPatchOperationParamType, ComputerActionType, - ComputerToolCallOutputItemOutputType, CredentialType, + CustomToolParamFormatType, DatasetType, DeploymentType, EvaluationRuleActionType, EvaluationTaxonomyInputType, EvaluatorDefinitionType, + FunctionAndCustomToolCallOutputType, + FunctionShellCallOutputOutcomeParamType, + FunctionShellCallOutputOutcomeType, IndexType, + InputContentType, + InputItemType, InsightType, - ItemContentType, - ItemType, - LocationType, + ItemResourceType, MemoryItemKind, MemoryStoreKind, OpenApiAuthType, + OutputContentType, + OutputMessageContentType, PendingUploadType, - ReasoningItemSummaryPartType, RecurrenceType, - ResponseStreamEventType, - ResponseTextFormatConfigurationType, - ResponsesMessageRole, SampleType, ScheduleTaskType, - ToolChoiceObjectType, + TextResponseFormatConfigurationType, ToolType, TriggerType, - WebSearchActionType, ) if TYPE_CHECKING: @@ -51,31 +52,33 @@ class Tool(_Model): - """Tool. + """A tool that can be used to generate a response. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - A2ATool, AzureAISearchAgentTool, AzureFunctionAgentTool, BingCustomSearchAgentTool, - BingGroundingAgentTool, BrowserAutomationAgentTool, CaptureStructuredOutputsTool, - CodeInterpreterTool, ComputerUsePreviewTool, MicrosoftFabricAgentTool, FileSearchTool, - FunctionTool, ImageGenTool, LocalShellTool, MCPTool, MemorySearchTool, OpenApiAgentTool, - SharepointAgentTool, WebSearchPreviewTool - - :ivar type: Required. 
Known values are: "file_search", "function", "computer_use_preview", - "web_search_preview", "mcp", "code_interpreter", "image_generation", "local_shell", - "bing_grounding", "browser_automation_preview", "fabric_dataagent_preview", - "sharepoint_grounding_preview", "azure_ai_search", "openapi", "bing_custom_search_preview", - "capture_structured_outputs", "a2a_preview", "azure_function", and "memory_search". + A2APreviewTool, ApplyPatchToolParam, AzureAISearchTool, AzureFunctionTool, + BingCustomSearchPreviewTool, BingGroundingTool, BrowserAutomationPreviewTool, + CaptureStructuredOutputsTool, CodeInterpreterTool, ComputerUsePreviewTool, CustomToolParam, + MicrosoftFabricPreviewTool, FileSearchTool, FunctionTool, ImageGenTool, LocalShellToolParam, + MCPTool, MemorySearchPreviewTool, OpenApiTool, SharepointPreviewTool, FunctionShellToolParam, + WebSearchTool, WebSearchPreviewTool + + :ivar type: Required. Known values are: "function", "file_search", "computer_use_preview", + "web_search", "mcp", "code_interpreter", "image_generation", "local_shell", "shell", "custom", + "web_search_preview", "apply_patch", "a2a_preview", "bing_custom_search_preview", + "browser_automation_preview", "fabric_dataagent_preview", "sharepoint_grounding_preview", + "azure_ai_search", "azure_function", "bing_grounding", "capture_structured_outputs", "openapi", + and "memory_search". :vartype type: str or ~azure.ai.projects.models.ToolType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"file_search\", \"function\", \"computer_use_preview\", - \"web_search_preview\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", - \"bing_grounding\", \"browser_automation_preview\", \"fabric_dataagent_preview\", - \"sharepoint_grounding_preview\", \"azure_ai_search\", \"openapi\", - \"bing_custom_search_preview\", \"capture_structured_outputs\", \"a2a_preview\", - \"azure_function\", and \"memory_search\".""" + """Required. Known values are: \"function\", \"file_search\", \"computer_use_preview\", + \"web_search\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", + \"shell\", \"custom\", \"web_search_preview\", \"apply_patch\", \"a2a_preview\", + \"bing_custom_search_preview\", \"browser_automation_preview\", \"fabric_dataagent_preview\", + \"sharepoint_grounding_preview\", \"azure_ai_search\", \"azure_function\", \"bing_grounding\", + \"capture_structured_outputs\", \"openapi\", and \"memory_search\".""" @overload def __init__( @@ -95,10 +98,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class A2ATool(Tool, discriminator="a2a_preview"): +class A2APreviewTool(Tool, discriminator="a2a_preview"): """An agent implementing the A2A protocol. - :ivar type: The type of the tool. Always ``a2a``. Required. + :ivar type: The type of the tool. Always ``"a2a_preview``. Required. :vartype type: str or ~azure.ai.projects.models.A2A_PREVIEW :ivar base_url: Base URL of the agent. :vartype base_url: str @@ -112,7 +115,7 @@ class A2ATool(Tool, discriminator="a2a_preview"): """ type: Literal[ToolType.A2A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``a2a``. Required.""" + """The type of the tool. Always ``\"a2a_preview``. 
Required.""" base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Base URL of the agent.""" agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -746,20 +749,20 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class Annotation(_Model): - """Annotation. + """An annotation that applies to a span of output text. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AnnotationFileCitation, AnnotationFilePath, AnnotationUrlCitation + ContainerFileCitationBody, FileCitationBody, FilePath, UrlCitationBody - :ivar type: Required. Known values are: "file_citation", "url_citation", "file_path", and - "container_file_citation". + :ivar type: Required. Known values are: "file_citation", "url_citation", + "container_file_citation", and "file_path". :vartype type: str or ~azure.ai.projects.models.AnnotationType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"file_citation\", \"url_citation\", \"file_path\", and - \"container_file_citation\".""" + """Required. Known values are: \"file_citation\", \"url_citation\", \"container_file_citation\", + and \"file_path\".""" @overload def __init__( @@ -779,35 +782,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AnnotationFileCitation(Annotation, discriminator="file_citation"): - """A citation to a file. +class ApiErrorResponse(_Model): + """Error response for API failures. - :ivar type: The type of the file citation. Always ``file_citation``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_CITATION - :ivar file_id: The ID of the file. Required. - :vartype file_id: str - :ivar index: The index of the file in the list of files. Required. 
- :vartype index: int - :ivar filename: The filename of the file cited. Required. - :vartype filename: str + :ivar error: Required. + :vartype error: ~azure.ai.projects.models.Error """ - type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file citation. Always ``file_citation``. Required.""" - file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file. Required.""" - index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the file in the list of files. Required.""" - filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The filename of the file cited. Required.""" + error: "_models.Error" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - file_id: str, - index: int, - filename: str, + error: "_models.Error", ) -> None: ... @overload @@ -819,33 +808,58 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = AnnotationType.FILE_CITATION # type: ignore -class AnnotationFilePath(Annotation, discriminator="file_path"): - """A path to a file. +class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): + """API Key Credential definition. - :ivar type: The type of the file path. Always ``file_path``. Required. - :vartype type: str or ~azure.ai.projects.models.FILE_PATH - :ivar file_id: The ID of the file. Required. - :vartype file_id: str - :ivar index: The index of the file in the list of files. Required. - :vartype index: int + :ivar type: The credential type. Required. API Key credential + :vartype type: str or ~azure.ai.projects.models.API_KEY + :ivar api_key: API Key. 
+ :vartype api_key: str """ - type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file path. Always ``file_path``. Required.""" - file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file. Required.""" - index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the file in the list of files. Required.""" + type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. API Key credential""" + api_key: Optional[str] = rest_field(name="key", visibility=["read"]) + """API Key.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.API_KEY # type: ignore + + +class ApplyPatchFileOperation(_Model): + """Apply patch operation. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ApplyPatchCreateFileOperation, ApplyPatchDeleteFileOperation, ApplyPatchUpdateFileOperation + + :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file". + :vartype type: str or ~azure.ai.projects.models.ApplyPatchFileOperationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\".""" @overload def __init__( self, *, - file_id: str, - index: int, + type: str, ) -> None: ... 
@overload @@ -857,44 +871,32 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = AnnotationType.FILE_PATH # type: ignore -class AnnotationUrlCitation(Annotation, discriminator="url_citation"): - """A citation for a web resource used to generate a model response. +class ApplyPatchCreateFileOperation(ApplyPatchFileOperation, discriminator="create_file"): + """Apply patch create file operation. - :ivar type: The type of the URL citation. Always ``url_citation``. Required. - :vartype type: str or ~azure.ai.projects.models.URL_CITATION - :ivar url: The URL of the web resource. Required. - :vartype url: str - :ivar start_index: The index of the first character of the URL citation in the message. - Required. - :vartype start_index: int - :ivar end_index: The index of the last character of the URL citation in the message. Required. - :vartype end_index: int - :ivar title: The title of the web resource. Required. - :vartype title: str + :ivar type: Create a new file with the provided diff. Required. + :vartype type: str or ~azure.ai.projects.models.CREATE_FILE + :ivar path: Path of the file to create. Required. + :vartype path: str + :ivar diff: Diff to apply. Required. + :vartype diff: str """ - type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the URL citation. Always ``url_citation``. Required.""" - url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the web resource. Required.""" - start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the first character of the URL citation in the message. 
Required.""" - end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the last character of the URL citation in the message. Required.""" - title: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The title of the web resource. Required.""" + type: Literal[ApplyPatchFileOperationType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Create a new file with the provided diff. Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to create. Required.""" + diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Diff to apply. Required.""" @overload def __init__( self, *, - url: str, - start_index: int, - end_index: int, - title: str, + path: str, + diff: str, ) -> None: ... @overload @@ -906,24 +908,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = AnnotationType.URL_CITATION # type: ignore + self.type = ApplyPatchFileOperationType.CREATE_FILE # type: ignore -class ApiErrorResponse(_Model): - """Error response for API failures. +class ApplyPatchOperationParam(_Model): + """Apply patch operation. - :ivar error: Required. - :vartype error: ~azure.ai.projects.models.Error + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ApplyPatchCreateFileOperationParam, ApplyPatchDeleteFileOperationParam, + ApplyPatchUpdateFileOperationParam + + :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file". 
+ :vartype type: str or ~azure.ai.projects.models.ApplyPatchOperationParamType """ - error: "_models.Error" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\".""" @overload def __init__( self, *, - error: "_models.Error", + type: str, ) -> None: ... @overload @@ -937,23 +944,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): - """API Key Credential definition. +class ApplyPatchCreateFileOperationParam(ApplyPatchOperationParam, discriminator="create_file"): + """Apply patch create file operation. - :ivar type: The credential type. Required. API Key credential - :vartype type: str or ~azure.ai.projects.models.API_KEY - :ivar api_key: API Key. - :vartype api_key: str + :ivar type: The operation type. Always ``create_file``. Required. + :vartype type: str or ~azure.ai.projects.models.CREATE_FILE + :ivar path: Path of the file to create relative to the workspace root. Required. + :vartype path: str + :ivar diff: Unified diff content to apply when creating the file. Required. + :vartype diff: str """ - type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. API Key credential""" - api_key: Optional[str] = rest_field(name="key", visibility=["read"]) - """API Key.""" + type: Literal[ApplyPatchOperationParamType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The operation type. Always ``create_file``. 
Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to create relative to the workspace root. Required.""" + diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unified diff content to apply when creating the file. Required.""" @overload def __init__( self, + *, + path: str, + diff: str, ) -> None: ... @overload @@ -965,28 +979,164 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = CredentialType.API_KEY # type: ignore + self.type = ApplyPatchOperationParamType.CREATE_FILE # type: ignore -class Location(_Model): - """Location. +class ApplyPatchDeleteFileOperation(ApplyPatchFileOperation, discriminator="delete_file"): + """Apply patch delete file operation. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ApproximateLocation + :ivar type: Delete the specified file. Required. + :vartype type: str or ~azure.ai.projects.models.DELETE_FILE + :ivar path: Path of the file to delete. Required. + :vartype path: str + """ + + type: Literal[ApplyPatchFileOperationType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Delete the specified file. Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to delete. Required.""" + + @overload + def __init__( + self, + *, + path: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ - :ivar type: Required. 
"approximate" - :vartype type: str or ~azure.ai.projects.models.LocationType + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ApplyPatchFileOperationType.DELETE_FILE # type: ignore + + +class ApplyPatchDeleteFileOperationParam(ApplyPatchOperationParam, discriminator="delete_file"): + """Apply patch delete file operation. + + :ivar type: The operation type. Always ``delete_file``. Required. + :vartype type: str or ~azure.ai.projects.models.DELETE_FILE + :ivar path: Path of the file to delete relative to the workspace root. Required. + :vartype path: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. \"approximate\"""" + type: Literal[ApplyPatchOperationParamType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The operation type. Always ``delete_file``. Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to delete relative to the workspace root. Required.""" @overload def __init__( self, *, - type: str, + path: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ApplyPatchOperationParamType.DELETE_FILE # type: ignore + + +class ApplyPatchToolParam(Tool, discriminator="apply_patch"): + """Apply patch tool. + + :ivar type: The type of the tool. Always ``apply_patch``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH + """ + + type: Literal[ToolType.APPLY_PATCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``apply_patch``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.APPLY_PATCH # type: ignore + + +class ApplyPatchUpdateFileOperation(ApplyPatchFileOperation, discriminator="update_file"): + """Apply patch update file operation. + + :ivar type: Update an existing file with the provided diff. Required. + :vartype type: str or ~azure.ai.projects.models.UPDATE_FILE + :ivar path: Path of the file to update. Required. + :vartype path: str + :ivar diff: Diff to apply. Required. + :vartype diff: str + """ + + type: Literal[ApplyPatchFileOperationType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Update an existing file with the provided diff. Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to update. Required.""" + diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Diff to apply. Required.""" + + @overload + def __init__( + self, + *, + path: str, + diff: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ApplyPatchFileOperationType.UPDATE_FILE # type: ignore + + +class ApplyPatchUpdateFileOperationParam(ApplyPatchOperationParam, discriminator="update_file"): + """Apply patch update file operation. + + :ivar type: The operation type. Always ``update_file``. Required. + :vartype type: str or ~azure.ai.projects.models.UPDATE_FILE + :ivar path: Path of the file to update relative to the workspace root. Required. + :vartype path: str + :ivar diff: Unified diff content to apply to the existing file. Required. + :vartype diff: str + """ + + type: Literal[ApplyPatchOperationParamType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The operation type. Always ``update_file``. Required.""" + path: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Path of the file to update relative to the workspace root. Required.""" + diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unified diff content to apply to the existing file. Required.""" + + @overload + def __init__( + self, + *, + path: str, + diff: str, ) -> None: ... @overload @@ -998,13 +1148,15 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ApplyPatchOperationParamType.UPDATE_FILE # type: ignore -class ApproximateLocation(Location, discriminator="approximate"): +class ApproximateLocation(_Model): """ApproximateLocation. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.APPROXIMATE + :ivar type: The type of location approximation. Always ``approximate``. Required. Default value + is "approximate". 
+ :vartype type: str :ivar country: :vartype country: str :ivar region: @@ -1015,8 +1167,9 @@ class ApproximateLocation(Location, discriminator="approximate"): :vartype timezone: str """ - type: Literal[LocationType.APPROXIMATE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + type: Literal["approximate"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of location approximation. Always ``approximate``. Required. Default value is + \"approximate\".""" country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1041,14 +1194,14 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = LocationType.APPROXIMATE # type: ignore + self.type: Literal["approximate"] = "approximate" class Target(_Model): """Base class for targets with discriminator support. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureAIAgentTarget, AzureAIAssistantTarget, AzureAIModelTarget + AzureAIAgentTarget, AzureAIModelTarget :ivar type: The type of target. Required. Default value is None. :vartype type: str @@ -1123,41 +1276,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = "azure_ai_agent" # type: ignore -class AzureAISearchAgentTool(Tool, discriminator="azure_ai_search"): - """The input definition information for an Azure AI search tool as used to configure an agent. - - :ivar type: The object type, which is always 'azure_ai_search'. Required. - :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH - :ivar azure_ai_search: The azure ai search index resource. Required. 
- :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource - """ - - type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'azure_ai_search'. Required.""" - azure_ai_search: "_models.AzureAISearchToolResource" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The azure ai search index resource. Required.""" - - @overload - def __init__( - self, - *, - azure_ai_search: "_models.AzureAISearchToolResource", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.AZURE_AI_SEARCH # type: ignore - - class Index(_Model): """Index resource Definition. @@ -1269,25 +1387,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = IndexType.AZURE_SEARCH # type: ignore -class AzureAISearchToolResource(_Model): - """A set of index resources used by the ``azure_ai_search`` tool. +class AzureAISearchTool(Tool, discriminator="azure_ai_search"): + """The input definition information for an Azure AI search tool as used to configure an agent. - :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent. Required. - :vartype indexes: list[~azure.ai.projects.models.AISearchIndexResource] + :ivar type: The object type, which is always 'azure_ai_search'. Required. + :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH + :ivar azure_ai_search: The azure ai search index resource. Required. 
+ :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource """ - indexes: list["_models.AISearchIndexResource"] = rest_field( + type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'azure_ai_search'. Required.""" + azure_ai_search: "_models.AzureAISearchToolResource" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The indices attached to this agent. There can be a maximum of 1 index - resource attached to the agent. Required.""" + """The azure ai search index resource. Required.""" @overload def __init__( self, *, - indexes: list["_models.AISearchIndexResource"], + azure_ai_search: "_models.AzureAISearchToolResource", ) -> None: ... @overload @@ -1299,29 +1419,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ToolType.AZURE_AI_SEARCH # type: ignore -class AzureFunctionAgentTool(Tool, discriminator="azure_function"): - """The input definition information for an Azure Function Tool, as used to configure an Agent. +class AzureAISearchToolResource(_Model): + """A set of index resources used by the ``azure_ai_search`` tool. - :ivar type: The object type, which is always 'browser_automation'. Required. - :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION - :ivar azure_function: The Azure Function Tool definition. Required. - :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition + :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index + resource attached to the agent. Required. 
+    :vartype indexes: list[~azure.ai.projects.models.AISearchIndexResource]
     """
 
-    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
-    """The object type, which is always 'browser_automation'. Required."""
-    azure_function: "_models.AzureFunctionDefinition" = rest_field(
+    indexes: list["_models.AISearchIndexResource"] = rest_field(
         visibility=["read", "create", "update", "delete", "query"]
     )
-    """The Azure Function Tool definition. Required."""
+    """The indices attached to this agent. There can be a maximum of 1 index
+    resource attached to the agent. Required."""
 
     @overload
     def __init__(
         self,
         *,
-        azure_function: "_models.AzureFunctionDefinition",
+        indexes: list["_models.AISearchIndexResource"],
     ) -> None: ...
 
     @overload
@@ -1333,7 +1452,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None:
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
-        self.type = ToolType.AZURE_FUNCTION  # type: ignore
 
 
 class AzureFunctionBinding(_Model):
@@ -1496,6 +1614,41 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
 
 
+class AzureFunctionTool(Tool, discriminator="azure_function"):
+    """The input definition information for an Azure Function Tool, as used to configure an Agent.
+
+    :ivar type: The object type, which is always 'azure_function'. Required.
+    :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION
+    :ivar azure_function: The Azure Function Tool definition. Required.
+    :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition
+    """
+
+    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'azure_function'. 
Required.""" + azure_function: "_models.AzureFunctionDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The Azure Function Tool definition. Required.""" + + @overload + def __init__( + self, + *, + azure_function: "_models.AzureFunctionDefinition", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.AZURE_FUNCTION # type: ignore + + class TargetConfig(_Model): """Abstract class for target configuration. @@ -1567,41 +1720,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = "AzureOpenAIModel" # type: ignore -class BingCustomSearchAgentTool(Tool, discriminator="bing_custom_search_preview"): - """The input definition information for a Bing custom search tool as used to configure an agent. - - :ivar type: The object type, which is always 'bing_custom_search'. Required. - :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW - :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. - :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters - """ - - type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_custom_search'. Required.""" - bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The bing custom search tool parameters. Required.""" - - @overload - def __init__( - self, - *, - bing_custom_search_preview: "_models.BingCustomSearchToolParameters", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore - - class BingCustomSearchConfiguration(_Model): """A bing custom search configuration. @@ -1657,26 +1775,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BingCustomSearchToolParameters(_Model): - """The bing custom search tool parameters. +class BingCustomSearchPreviewTool(Tool, discriminator="bing_custom_search_preview"): + """The input definition information for a Bing custom search tool as used to configure an agent. - :ivar search_configurations: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. Required. - :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] + :ivar type: The object type, which is always 'bing_custom_search_preview'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW + :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. + :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters """ - search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( + type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search_preview'. Required.""" + bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The project connections attached to this tool. 
There can be a maximum of 1 connection - resource attached to the tool. Required.""" + """The bing custom search tool parameters. Required.""" @overload def __init__( self, *, - search_configurations: list["_models.BingCustomSearchConfiguration"], + bing_custom_search_preview: "_models.BingCustomSearchToolParameters", ) -> None: ... @overload @@ -1688,30 +1807,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore -class BingGroundingAgentTool(Tool, discriminator="bing_grounding"): - """The input definition information for a bing grounding search tool as used to configure an - agent. +class BingCustomSearchToolParameters(_Model): + """The bing custom search tool parameters. - :ivar type: The object type, which is always 'bing_grounding'. Required. - :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING - :ivar bing_grounding: The bing grounding search tool parameters. Required. - :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters + :ivar search_configurations: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. Required. + :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] """ - type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_grounding'. Required.""" - bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( + search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The bing grounding search tool parameters. Required.""" + """The project connections attached to this tool. 
There can be a maximum of 1 connection + resource attached to the tool. Required.""" @overload def __init__( self, *, - bing_grounding: "_models.BingGroundingSearchToolParameters", + search_configurations: list["_models.BingCustomSearchConfiguration"], ) -> None: ... @overload @@ -1723,7 +1841,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.BING_GROUNDING # type: ignore class BingGroundingSearchConfiguration(_Model): @@ -1810,6 +1927,42 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class BingGroundingTool(Tool, discriminator="bing_grounding"): + """The input definition information for a bing grounding search tool as used to configure an + agent. + + :ivar type: The object type, which is always 'bing_grounding'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING + :ivar bing_grounding: The bing grounding search tool parameters. Required. + :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters + """ + + type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_grounding'. Required.""" + bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The bing grounding search tool parameters. Required.""" + + @overload + def __init__( + self, + *, + bing_grounding: "_models.BingGroundingSearchToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BING_GROUNDING # type: ignore + + class BlobReference(_Model): """Blob reference details. @@ -1873,17 +2026,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type: Literal["SAS"] = "SAS" -class BrowserAutomationAgentTool(Tool, discriminator="browser_automation_preview"): +class BrowserAutomationPreviewTool(Tool, discriminator="browser_automation_preview"): """The input definition information for a Browser Automation Tool, as used to configure an Agent. - :ivar type: The object type, which is always 'browser_automation'. Required. + :ivar type: The object type, which is always 'browser_automation_preview'. Required. :vartype type: str or ~azure.ai.projects.models.BROWSER_AUTOMATION_PREVIEW :ivar browser_automation_preview: The Browser Automation Tool parameters. Required. :vartype browser_automation_preview: ~azure.ai.projects.models.BrowserAutomationToolParameters """ type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'browser_automation'. Required.""" + """The object type, which is always 'browser_automation_preview'. Required.""" browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -2138,6 +2291,90 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.kind = MemoryItemKind.CHAT_SUMMARY # type: ignore +class ComputerAction(_Model): + """ComputerAction. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ClickParam, DoubleClickAction, Drag, KeyPressAction, Move, Screenshot, Scroll, Type, Wait + + :ivar type: Required. 
Known values are: "click", "double_click", "drag", "keypress", "move", + "screenshot", "scroll", "type", and "wait". + :vartype type: str or ~azure.ai.projects.models.ComputerActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"click\", \"double_click\", \"drag\", \"keypress\", \"move\", + \"screenshot\", \"scroll\", \"type\", and \"wait\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ClickParam(ComputerAction, discriminator="click"): + """Click. + + :ivar type: Specifies the event type. For a click action, this property is always ``click``. + Required. + :vartype type: str or ~azure.ai.projects.models.CLICK + :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, + ``right``, ``wheel``, ``back``, or ``forward``. Required. Known values are: "left", "right", + "wheel", "back", and "forward". + :vartype button: str or ~azure.ai.projects.models.ClickButtonType + :ivar x: The x-coordinate where the click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the click occurred. Required. + :vartype y: int + """ + + type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a click action, this property is always ``click``. Required.""" + button: Union[str, "_models.ClickButtonType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates which mouse button was pressed during the click. 
One of ``left``, ``right``, + ``wheel``, ``back``, or ``forward``. Required. Known values are: \"left\", \"right\", + \"wheel\", \"back\", and \"forward\".""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the click occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the click occurred. Required.""" + + @overload + def __init__( + self, + *, + button: Union[str, "_models.ClickButtonType"], + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.CLICK # type: ignore + + class ClusterInsightResult(_Model): """Insights from the cluster analysis. @@ -2358,25 +2595,32 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = EvaluatorDefinitionType.CODE # type: ignore -class CodeInterpreterOutput(_Model): - """CodeInterpreterOutput. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CodeInterpreterOutputImage, CodeInterpreterOutputLogs +class CodeInterpreterContainerAuto(_Model): + """CodeInterpreterToolAuto. - :ivar type: Required. Known values are: "logs" and "image". - :vartype type: str or ~azure.ai.projects.models.CodeInterpreterOutputType + :ivar type: Always ``auto``. Required. Default value is "auto". + :vartype type: str + :ivar file_ids: An optional list of uploaded files to make available to your code. + :vartype file_ids: list[str] + :ivar memory_limit: Known values are: "1g", "4g", "16g", and "64g". 
+ :vartype memory_limit: str or ~azure.ai.projects.models.ContainerMemoryLimit """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"logs\" and \"image\".""" - + type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Always ``auto``. Required. Default value is \"auto\".""" + file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An optional list of uploaded files to make available to your code.""" + memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"1g\", \"4g\", \"16g\", and \"64g\".""" + @overload def __init__( self, *, - type: str, + file_ids: Optional[list[str]] = None, + memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = None, ) -> None: ... @overload @@ -2388,19 +2632,20 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type: Literal["auto"] = "auto" -class CodeInterpreterOutputImage(CodeInterpreterOutput, discriminator="image"): - """The image output from the code interpreter. +class CodeInterpreterOutputImage(_Model): + """Code interpreter output image. - :ivar type: The type of the output. Always 'image'. Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE + :ivar type: The type of the output. Always ``image``. Required. Default value is "image". + :vartype type: str :ivar url: The URL of the image output from the code interpreter. Required. :vartype url: str """ - type: Literal[CodeInterpreterOutputType.IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output. Always 'image'. 
Required.""" + type: Literal["image"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the output. Always ``image``. Required. Default value is \"image\".""" url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The URL of the image output from the code interpreter. Required.""" @@ -2420,20 +2665,20 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = CodeInterpreterOutputType.IMAGE # type: ignore + self.type: Literal["image"] = "image" -class CodeInterpreterOutputLogs(CodeInterpreterOutput, discriminator="logs"): - """The logs output from the code interpreter. +class CodeInterpreterOutputLogs(_Model): + """Code interpreter output logs. - :ivar type: The type of the output. Always 'logs'. Required. - :vartype type: str or ~azure.ai.projects.models.LOGS + :ivar type: The type of the output. Always ``logs``. Required. Default value is "logs". + :vartype type: str :ivar logs: The logs output from the code interpreter. Required. :vartype logs: str """ - type: Literal[CodeInterpreterOutputType.LOGS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output. Always 'logs'. Required.""" + type: Literal["logs"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the output. Always ``logs``. Required. Default value is \"logs\".""" logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The logs output from the code interpreter. 
Required.""" @@ -2453,34 +2698,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = CodeInterpreterOutputType.LOGS # type: ignore + self.type: Literal["logs"] = "logs" class CodeInterpreterTool(Tool, discriminator="code_interpreter"): - """A tool that runs Python code to help generate a response to a prompt. + """Code interpreter. :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required. :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER :ivar container: The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code. Required. Is either a str type or a - CodeInterpreterToolAuto type. - :vartype container: str or ~azure.ai.projects.models.CodeInterpreterToolAuto + specifies uploaded file IDs to make available to your code, along with an + optional ``memory_limit`` setting. Required. Is either a str type or a + CodeInterpreterContainerAuto type. + :vartype container: str or ~azure.ai.projects.models.CodeInterpreterContainerAuto """ type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """The type of the code interpreter tool. Always ``code_interpreter``. Required.""" - container: Union[str, "_models.CodeInterpreterToolAuto"] = rest_field( + container: Union[str, "_models.CodeInterpreterContainerAuto"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """The code interpreter container. Can be a container ID or an object that - specifies uploaded file IDs to make available to your code. Required. Is either a str type or a - CodeInterpreterToolAuto type.""" + specifies uploaded file IDs to make available to your code, along with an + optional ``memory_limit`` setting. Required. 
Is either a str type or a + CodeInterpreterContainerAuto type.""" @overload def __init__( self, *, - container: Union[str, "_models.CodeInterpreterToolAuto"], + container: Union[str, "_models.CodeInterpreterContainerAuto"], ) -> None: ... @overload @@ -2495,26 +2742,61 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.CODE_INTERPRETER # type: ignore -class CodeInterpreterToolAuto(_Model): - """Configuration for a code interpreter container. Optionally specify the IDs - of the files to run the code on. - - :ivar type: Always ``auto``. Required. Default value is "auto". - :vartype type: str - :ivar file_ids: An optional list of uploaded files to make available to your code. - :vartype file_ids: list[str] +class ComparisonFilter(_Model): + """Comparison Filter. + + :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, + ``lte``, ``in``, ``nin``. + + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal + * `in`: in + * `nin`: not in. Required. Is one of the following types: Literal["eq"], Literal["ne"], + Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"] + :vartype type: str or str or str or str or str or str + :ivar key: The key to compare against the value. Required. + :vartype key: str + :ivar value: The value to compare against the attribute key; supports string, number, or + boolean types. Required. Is one of the following types: str, float, bool, + ["_types.ComparisonFilterValueItems"] + :vartype value: str or float or bool or list[str or float] """ - type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Always ``auto``. Required. 
Default value is \"auto\".""" - file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An optional list of uploaded files to make available to your code.""" + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``, ``in``, + ``nin``. + + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal + * `in`: in + * `nin`: not in. Required. Is one of the following types: Literal[\"eq\"], + Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]""" + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The key to compare against the value. Required.""" + value: Union[str, float, bool, list["_types.ComparisonFilterValueItems"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The value to compare against the attribute key; supports string, number, or boolean types. + Required. Is one of the following types: str, float, bool, + [\"_types.ComparisonFilterValueItems\"]""" @overload def __init__( self, *, - file_ids: Optional[list[str]] = None, + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"], + key: str, + value: Union[str, float, bool, list["_types.ComparisonFilterValueItems"]], ) -> None: ... @overload @@ -2526,43 +2808,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type: Literal["auto"] = "auto" -class ItemParam(_Model): - """Content item used to generate a response. +class CompoundFilter(_Model): + """Compound Filter. - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - CodeInterpreterToolCallItemParam, ComputerToolCallItemParam, ComputerToolCallOutputItemParam, - FileSearchToolCallItemParam, FunctionToolCallItemParam, FunctionToolCallOutputItemParam, - ImageGenToolCallItemParam, ItemReferenceItemParam, LocalShellToolCallItemParam, - LocalShellToolCallOutputItemParam, MCPApprovalRequestItemParam, MCPApprovalResponseItemParam, - MCPCallItemParam, MCPListToolsItemParam, MemorySearchToolCallItemParam, - ResponsesMessageItemParam, ReasoningItemParam, WebSearchToolCallItemParam - - :ivar type: Required. Known values are: "message", "file_search_call", "function_call", - "function_call_output", "computer_call", "computer_call_output", "web_search_call", - "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", - "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", - "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", - "memory_search_call", and "oauth_consent_request". - :vartype type: str or ~azure.ai.projects.models.ItemType + :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or + a Literal["or"] type. + :vartype type: str or str + :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or + ``CompoundFilter``. Required. + :vartype filters: list[~azure.ai.projects.models.ComparisonFilter or any] """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"message\", \"file_search_call\", \"function_call\", - \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", - \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", - \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", - \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", - \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a + Literal[\"or\"] type.""" + filters: list[Union["_models.ComparisonFilter", Any]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required.""" @overload def __init__( self, *, - type: str, + type: Literal["and", "or"], + filters: list[Union["_models.ComparisonFilter", Any]], ) -> None: ... @overload @@ -2576,39 +2848,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CodeInterpreterToolCallItemParam(ItemParam, discriminator="code_interpreter_call"): - """A tool call to run code. +class ComputerCallSafetyCheckParam(_Model): + """A pending safety check for the computer call. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL - :ivar container_id: The ID of the container used to run the code. Required. - :vartype container_id: str - :ivar code: The code to run, or null if not available. Required. + :ivar id: The ID of the pending safety check. Required. + :vartype id: str + :ivar code: :vartype code: str - :ivar outputs: The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required. 
- :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + :ivar message: + :vartype message: str """ - type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container used to run the code. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The code to run, or null if not available. Required.""" - outputs: list["_models.CodeInterpreterOutput"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the pending safety check. Required.""" + code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - container_id: str, - code: str, - outputs: list["_models.CodeInterpreterOutput"], + id: str, # pylint: disable=redefined-builtin + code: Optional[str] = None, + message: Optional[str] = None, ) -> None: ... @overload @@ -2620,55 +2882,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore -class ItemResource(_Model): - """Content item used to generate a response. +class ComputerScreenshotImage(_Model): + """A computer screenshot image used with the computer use tool. - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - CodeInterpreterToolCallItemResource, ComputerToolCallItemResource, - ComputerToolCallOutputItemResource, FileSearchToolCallItemResource, - FunctionToolCallItemResource, FunctionToolCallOutputItemResource, ImageGenToolCallItemResource, - LocalShellToolCallItemResource, LocalShellToolCallOutputItemResource, - MCPApprovalRequestItemResource, MCPApprovalResponseItemResource, MCPCallItemResource, - MCPListToolsItemResource, MemorySearchToolCallItemResource, ResponsesMessageItemResource, - OAuthConsentRequestItemResource, ReasoningItemResource, StructuredOutputsItemResource, - WebSearchToolCallItemResource, WorkflowActionOutputItemResource - - :ivar type: Required. Known values are: "message", "file_search_call", "function_call", - "function_call_output", "computer_call", "computer_call_output", "web_search_call", - "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", - "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", - "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", - "memory_search_call", and "oauth_consent_request". - :vartype type: str or ~azure.ai.projects.models.ItemType - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Specifies the event type. For a computer screenshot, this property is + always set to ``computer_screenshot``. Required. Default value is "computer_screenshot". + :vartype type: str + :ivar image_url: The URL of the screenshot image. + :vartype image_url: str + :ivar file_id: The identifier of an uploaded file that contains the screenshot. + :vartype file_id: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"message\", \"file_search_call\", \"function_call\", - \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", - \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", - \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", - \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", - \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - created_by: Optional["_models.CreatedBy"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The information about the creator of the item.""" + type: Literal["computer_screenshot"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specifies the event type. For a computer screenshot, this property is + always set to ``computer_screenshot``. Required. Default value is \"computer_screenshot\".""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the screenshot image.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identifier of an uploaded file that contains the screenshot.""" @overload def __init__( self, *, - type: str, - id: str, # pylint: disable=redefined-builtin - created_by: Optional["_models.CreatedBy"] = None, + image_url: Optional[str] = None, + file_id: Optional[str] = None, ) -> None: ... @overload @@ -2680,56 +2921,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type: Literal["computer_screenshot"] = "computer_screenshot" -class CodeInterpreterToolCallItemResource(ItemResource, discriminator="code_interpreter_call"): - """A tool call to run code. 
+class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): + """Computer use preview. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"], Literal["interpreting"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar container_id: The ID of the container used to run the code. Required. - :vartype container_id: str - :ivar code: The code to run, or null if not available. Required. - :vartype code: str - :ivar outputs: The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required. - :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW + :ivar environment: The type of computer environment to control. Required. Known values are: + "windows", "mac", "linux", "ubuntu", and "browser". + :vartype environment: str or ~azure.ai.projects.models.ComputerEnvironment + :ivar display_width: The width of the computer display. Required. + :vartype display_width: int + :ivar display_height: The height of the computer display. Required. + :vartype display_height: int """ - type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. 
Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"interpreting\"], Literal[\"failed\"]""" - container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the container used to run the code. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The code to run, or null if not available. Required.""" - outputs: list["_models.CodeInterpreterOutput"] = rest_field( + type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer use tool. Always ``computer_use_preview``. Required.""" + environment: Union[str, "_models.ComputerEnvironment"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The outputs generated by the code interpreter, such as logs or images. - Can be null if no outputs are available. Required.""" + """The type of computer environment to control. Required. Known values are: \"windows\", \"mac\", + \"linux\", \"ubuntu\", and \"browser\".""" + display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The width of the computer display. Required.""" + display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The height of the computer display. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], - container_id: str, - code: str, - outputs: list["_models.CodeInterpreterOutput"], - created_by: Optional["_models.CreatedBy"] = None, + environment: Union[str, "_models.ComputerEnvironment"], + display_width: int, + display_height: int, ) -> None: ... 
@overload @@ -2741,46 +2968,90 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore + self.type = ToolType.COMPUTER_USE_PREVIEW # type: ignore -class ComparisonFilter(_Model): - """A filter used to compare a specified attribute key to a given value using a defined comparison - operation. +class Connection(_Model): + """Response from the list and get connections operations. - :ivar type: Specifies the comparison operator: - ``eq`` (equal), ``ne`` (not equal), ``gt`` (greater than), ``gte`` (greater than or equal), - ``lt`` (less than), ``lte`` (less than or equal). Required. Is one of the following types: - Literal["eq"], Literal["ne"], Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"] - :vartype type: str or str or str or str or str or str - :ivar key: The key to compare against the value. Required. - :vartype key: str - :ivar value: The value to compare against the attribute key; supports string, number, or - boolean types. Required. Is one of the following types: str, float, bool - :vartype value: str or float or bool + :ivar name: The friendly name of the connection, provided by the user. Required. + :vartype name: str + :ivar id: A unique identifier for the connection, generated by the service. Required. + :vartype id: str + :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", + "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", + "CustomKeys", and "RemoteTool". + :vartype type: str or ~azure.ai.projects.models.ConnectionType + :ivar target: The connection URL to be used for this service. Required. + :vartype target: str + :ivar is_default: Whether the connection is tagged as the default connection of its type. + Required. + :vartype is_default: bool + :ivar credentials: The credentials used by the connection. 
Required. + :vartype credentials: ~azure.ai.projects.models.BaseCredentials + :ivar metadata: Metadata of the connection. Required. + :vartype metadata: dict[str, str] """ - type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field( + name: str = rest_field(visibility=["read"]) + """The friendly name of the connection, provided by the user. Required.""" + id: str = rest_field(visibility=["read"]) + """A unique identifier for the connection, generated by the service. Required.""" + type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) + """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", + \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", + \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" + target: str = rest_field(visibility=["read"]) + """The connection URL to be used for this service. Required.""" + is_default: bool = rest_field(name="isDefault", visibility=["read"]) + """Whether the connection is tagged as the default connection of its type. Required.""" + credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) + """The credentials used by the connection. Required.""" + metadata: dict[str, str] = rest_field(visibility=["read"]) + """Metadata of the connection. Required.""" + + +class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app"): + """The container app agent definition. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. 
+ :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar container_app_resource_id: The resource ID of the Azure Container App that hosts this + agent. Not mutable across versions. Required. + :vartype container_app_resource_id: str + :ivar ingress_subdomain_suffix: The suffix to apply to the app subdomain when sending ingress + to the agent. This can be a label (e.g., '---current'), a specific revision (e.g., + '--0000001'), or empty to use the default endpoint for the container app. Required. + :vartype ingress_subdomain_suffix: str + """ + + kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Specifies the comparison operator: - ``eq`` (equal), ``ne`` (not equal), ``gt`` (greater than), ``gte`` (greater than or equal), - ``lt`` (less than), ``lte`` (less than or equal). Required. Is one of the following types: - Literal[\"eq\"], Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], - Literal[\"lte\"]""" - key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The key to compare against the value. Required.""" - value: Union[str, float, bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The value to compare against the attribute key; supports string, number, or boolean types. - Required. Is one of the following types: str, float, bool""" + """The protocols that the agent supports for ingress communication of the containers. Required.""" + container_app_resource_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resource ID of the Azure Container App that hosts this agent. Not mutable across versions. 
+ Required.""" + ingress_subdomain_suffix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The suffix to apply to the app subdomain when sending ingress to the agent. This can be a label + (e.g., '---current'), a specific revision (e.g., '--0000001'), or empty to use the default + endpoint for the container app. Required.""" @overload def __init__( self, *, - type: Literal["eq", "ne", "gt", "gte", "lt", "lte"], - key: str, - value: Union[str, float, bool], + container_protocol_versions: list["_models.ProtocolVersionRecord"], + container_app_resource_id: str, + ingress_subdomain_suffix: str, + rai_config: Optional["_models.RaiConfig"] = None, ) -> None: ... @overload @@ -2792,34 +3063,51 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.kind = AgentKind.CONTAINER_APP # type: ignore -class CompoundFilter(_Model): - """Combine multiple filters using ``and`` or ``or``. +class ContainerFileCitationBody(Annotation, discriminator="container_file_citation"): + """Container file citation. - :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or - a Literal["or"] type. - :vartype type: str or str - :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or - ``CompoundFilter``. Required. - :vartype filters: list[~azure.ai.projects.models.ComparisonFilter or - ~azure.ai.projects.models.CompoundFilter] + :ivar type: The type of the container file citation. Always ``container_file_citation``. + Required. + :vartype type: str or ~azure.ai.projects.models.CONTAINER_FILE_CITATION + :ivar container_id: The ID of the container file. Required. + :vartype container_id: str + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar start_index: The index of the first character of the container file citation in the + message. Required. 
+ :vartype start_index: int + :ivar end_index: The index of the last character of the container file citation in the message. + Required. + :vartype end_index: int + :ivar filename: The filename of the container file cited. Required. + :vartype filename: str """ - type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a - Literal[\"or\"] type.""" - filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required.""" + type: Literal[AnnotationType.CONTAINER_FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the container file citation. Always ``container_file_citation``. Required.""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container file. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the first character of the container file citation in the message. Required.""" + end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the last character of the container file citation in the message. Required.""" + filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The filename of the container file cited. 
Required.""" @overload def __init__( self, *, - type: Literal["and", "or"], - filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]], + container_id: str, + file_id: str, + start_index: int, + end_index: int, + filename: str, ) -> None: ... @overload @@ -2831,25 +3119,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = AnnotationType.CONTAINER_FILE_CITATION # type: ignore -class ComputerAction(_Model): - """ComputerAction. +class EvaluationRuleAction(_Model): + """Evaluation action model. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ComputerActionClick, ComputerActionDoubleClick, ComputerActionDrag, ComputerActionKeyPress, - ComputerActionMove, ComputerActionScreenshot, ComputerActionScroll, ComputerActionTypeKeys, - ComputerActionWait + ContinuousEvaluationRuleAction, HumanEvaluationRuleAction - :ivar type: Required. Known values are: "screenshot", "click", "double_click", "scroll", - "type", "wait", "keypress", "drag", and "move". - :vartype type: str or ~azure.ai.projects.models.ComputerActionType + :ivar type: Type of the evaluation action. Required. Known values are: "continuousEvaluation" + and "humanEvaluation". + :vartype type: str or ~azure.ai.projects.models.EvaluationRuleActionType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"screenshot\", \"click\", \"double_click\", \"scroll\", \"type\", - \"wait\", \"keypress\", \"drag\", and \"move\".""" + """Type of the evaluation action. Required. 
Known values are: \"continuousEvaluation\" and + \"humanEvaluation\".""" @overload def __init__( @@ -2869,44 +3156,32 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ComputerActionClick(ComputerAction, discriminator="click"): - """A click action. +class ContinuousEvaluationRuleAction(EvaluationRuleAction, discriminator="continuousEvaluation"): + """Evaluation rule action for continuous evaluation. - :ivar type: Specifies the event type. For a click action, this property is - always set to ``click``. Required. - :vartype type: str or ~azure.ai.projects.models.CLICK - :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, - ``right``, ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: - Literal["left"], Literal["right"], Literal["wheel"], Literal["back"], Literal["forward"] - :vartype button: str or str or str or str or str - :ivar x: The x-coordinate where the click occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the click occurred. Required. - :vartype y: int + :ivar type: Required. Continuous evaluation. + :vartype type: str or ~azure.ai.projects.models.CONTINUOUS_EVALUATION + :ivar eval_id: Eval Id to add continuous evaluation runs to. Required. + :vartype eval_id: str + :ivar max_hourly_runs: Maximum number of evaluation runs allowed per hour. + :vartype max_hourly_runs: int """ - type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a click action, this property is - always set to ``click``. 
Required.""" - button: Literal["left", "right", "wheel", "back", "forward"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + type: Literal[EvaluationRuleActionType.CONTINUOUS_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Continuous evaluation.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Eval Id to add continuous evaluation runs to. Required.""" + max_hourly_runs: Optional[int] = rest_field( + name="maxHourlyRuns", visibility=["read", "create", "update", "delete", "query"] ) - """Indicates which mouse button was pressed during the click. One of ``left``, ``right``, - ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: - Literal[\"left\"], Literal[\"right\"], Literal[\"wheel\"], Literal[\"back\"], - Literal[\"forward\"]""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the click occurred. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the click occurred. Required.""" + """Maximum number of evaluation runs allowed per hour.""" @overload def __init__( self, *, - button: Literal["left", "right", "wheel", "back", "forward"], - x: int, - y: int, + eval_id: str, + max_hourly_runs: Optional[int] = None, ) -> None: ... @overload @@ -2918,35 +3193,62 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.CLICK # type: ignore + self.type = EvaluationRuleActionType.CONTINUOUS_EVALUATION # type: ignore -class ComputerActionDoubleClick(ComputerAction, discriminator="double_click"): - """A double click action. 
+class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): + """CosmosDB Vector Store Index Definition. - :ivar type: Specifies the event type. For a double click action, this property is - always set to ``double_click``. Required. - :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK - :ivar x: The x-coordinate where the double click occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the double click occurred. Required. - :vartype y: int + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. CosmosDB + :vartype type: str or ~azure.ai.projects.models.COSMOS_DB + :ivar connection_name: Name of connection to CosmosDB. Required. + :vartype connection_name: str + :ivar database_name: Name of the CosmosDB Database. Required. + :vartype database_name: str + :ivar container_name: Name of CosmosDB Container. Required. + :vartype container_name: str + :ivar embedding_configuration: Embedding model configuration. Required. + :vartype embedding_configuration: ~azure.ai.projects.models.EmbeddingConfiguration + :ivar field_mapping: Field mapping configuration. Required. + :vartype field_mapping: ~azure.ai.projects.models.FieldMapping """ - type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a double click action, this property is - always set to ``double_click``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the double click occurred. 
Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the double click occurred. Required.""" + type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. CosmosDB""" + connection_name: str = rest_field(name="connectionName", visibility=["create"]) + """Name of connection to CosmosDB. Required.""" + database_name: str = rest_field(name="databaseName", visibility=["create"]) + """Name of the CosmosDB Database. Required.""" + container_name: str = rest_field(name="containerName", visibility=["create"]) + """Name of CosmosDB Container. Required.""" + embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( + name="embeddingConfiguration", visibility=["create"] + ) + """Embedding model configuration. Required.""" + field_mapping: "_models.FieldMapping" = rest_field(name="fieldMapping", visibility=["create"]) + """Field mapping configuration. Required.""" @overload def __init__( self, *, - x: int, - y: int, + connection_name: str, + database_name: str, + container_name: str, + embedding_configuration: "_models.EmbeddingConfiguration", + field_mapping: "_models.FieldMapping", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -2958,48 +3260,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.DOUBLE_CLICK # type: ignore - - -class ComputerActionDrag(ComputerAction, discriminator="drag"): - """A drag action. + self.type = IndexType.COSMOS_DB # type: ignore - :ivar type: Specifies the event type. For a drag action, this property is - always set to ``drag``. Required. 
- :vartype type: str or ~azure.ai.projects.models.DRAG - :ivar path: An array of coordinates representing the path of the drag action. Coordinates will - appear as an array - of objects, eg - .. code-block:: +class CreatedBy(_Model): + """CreatedBy. - [ - { x: 100, y: 200 }, - { x: 200, y: 300 } - ]. Required. - :vartype path: list[~azure.ai.projects.models.Coordinate] + :ivar agent: The agent that created the item. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar response_id: The response on which the item is created. + :vartype response_id: str """ - type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a drag action, this property is - always set to ``drag``. Required.""" - path: list["_models.Coordinate"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of coordinates representing the path of the drag action. Coordinates will appear as an - array - of objects, eg - - .. code-block:: - - [ - { x: 100, y: 200 }, - { x: 200, y: 300 } - ]. Required.""" + agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The agent that created the item.""" + response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response on which the item is created.""" @overload def __init__( self, *, - path: list["_models.Coordinate"], + agent: Optional["_models.AgentId"] = None, + response_id: Optional[str] = None, ) -> None: ... @overload @@ -3011,32 +3294,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.DRAG # type: ignore -class ComputerActionKeyPress(ComputerAction, discriminator="keypress"): - """A collection of keypresses the model would like to perform. 
+class Trigger(_Model): + """Base model for Trigger of the schedule. - :ivar type: Specifies the event type. For a keypress action, this property is - always set to ``keypress``. Required. - :vartype type: str or ~azure.ai.projects.models.KEYPRESS - :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an - array of strings, each representing a key. Required. - :vartype keys_property: list[str] + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CronTrigger, OneTimeTrigger, RecurrenceTrigger + + :ivar type: Type of the trigger. Required. Known values are: "Cron", "Recurrence", and + "OneTime". + :vartype type: str or ~azure.ai.projects.models.TriggerType """ - type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a keypress action, this property is - always set to ``keypress``. Required.""" - keys_property: list[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"]) - """The combination of keys the model is requesting to be pressed. This is an - array of strings, each representing a key. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the trigger. Required. Known values are: \"Cron\", \"Recurrence\", and \"OneTime\".""" @overload def __init__( self, *, - keys_property: list[str], + type: str, ) -> None: ... @overload @@ -3048,35 +3327,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.KEYPRESS # type: ignore -class ComputerActionMove(ComputerAction, discriminator="move"): - """A mouse move action. 
+class CronTrigger(Trigger, discriminator="Cron"): + """Cron based trigger. - :ivar type: Specifies the event type. For a move action, this property is - always set to ``move``. Required. - :vartype type: str or ~azure.ai.projects.models.MOVE - :ivar x: The x-coordinate to move to. Required. - :vartype x: int - :ivar y: The y-coordinate to move to. Required. - :vartype y: int + :ivar type: Required. Cron based trigger. + :vartype type: str or ~azure.ai.projects.models.CRON + :ivar expression: Cron expression that defines the schedule frequency. Required. + :vartype expression: str + :ivar time_zone: Time zone for the cron schedule. + :vartype time_zone: str + :ivar start_time: Start time for the cron schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the cron schedule in ISO 8601 format. + :vartype end_time: str """ - type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a move action, this property is - always set to ``move``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate to move to. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate to move to. Required.""" + type: Literal[TriggerType.CRON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Cron based trigger.""" + expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Cron expression that defines the schedule frequency. 
Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the cron schedule.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the cron schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the cron schedule in ISO 8601 format.""" @overload def __init__( self, *, - x: int, - y: int, + expression: str, + time_zone: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, ) -> None: ... @overload @@ -3088,20 +3374,18 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.MOVE # type: ignore + self.type = TriggerType.CRON # type: ignore -class ComputerActionScreenshot(ComputerAction, discriminator="screenshot"): - """A screenshot action. +class CustomCredential(BaseCredentials, discriminator="CustomKeys"): + """Custom credential definition. - :ivar type: Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required. - :vartype type: str or ~azure.ai.projects.models.SCREENSHOT + :ivar type: The credential type. Required. Custom credential + :vartype type: str or ~azure.ai.projects.models.CUSTOM """ - type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a screenshot action, this property is - always set to ``screenshot``. Required.""" + type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. 
Custom credential""" @overload def __init__( @@ -3117,45 +3401,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.SCREENSHOT # type: ignore + self.type = CredentialType.CUSTOM # type: ignore -class ComputerActionScroll(ComputerAction, discriminator="scroll"): - """A scroll action. +class CustomToolParamFormat(_Model): + """The input format for the custom tool. Default is unconstrained text. - :ivar type: Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required. - :vartype type: str or ~azure.ai.projects.models.SCROLL - :ivar x: The x-coordinate where the scroll occurred. Required. - :vartype x: int - :ivar y: The y-coordinate where the scroll occurred. Required. - :vartype y: int - :ivar scroll_x: The horizontal scroll distance. Required. - :vartype scroll_x: int - :ivar scroll_y: The vertical scroll distance. Required. - :vartype scroll_y: int + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomGrammarFormatParam, CustomTextFormatParam + + :ivar type: Required. Known values are: "text" and "grammar". + :vartype type: str or ~azure.ai.projects.models.CustomToolParamFormatType """ - type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a scroll action, this property is - always set to ``scroll``. Required.""" - x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The x-coordinate where the scroll occurred. Required.""" - y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The y-coordinate where the scroll occurred. 
Required.""" - scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The horizontal scroll distance. Required.""" - scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The vertical scroll distance. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"text\" and \"grammar\".""" @overload def __init__( self, *, - x: int, - y: int, - scroll_x: int, - scroll_y: int, + type: str, ) -> None: ... @overload @@ -3167,30 +3434,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.SCROLL # type: ignore -class ComputerActionTypeKeys(ComputerAction, discriminator="type"): - """An action to type in text. +class CustomGrammarFormatParam(CustomToolParamFormat, discriminator="grammar"): + """Grammar format. - :ivar type: Specifies the event type. For a type action, this property is - always set to ``type``. Required. - :vartype type: str or ~azure.ai.projects.models.TYPE - :ivar text: The text to type. Required. - :vartype text: str + :ivar type: Grammar format. Always ``grammar``. Required. + :vartype type: str or ~azure.ai.projects.models.GRAMMAR + :ivar syntax: The syntax of the grammar definition. One of ``lark`` or ``regex``. Required. + Known values are: "lark" and "regex". + :vartype syntax: str or ~azure.ai.projects.models.GrammarSyntax1 + :ivar definition: The grammar definition. Required. + :vartype definition: str """ - type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a type action, this property is - always set to ``type``. 
Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text to type. Required.""" + type: Literal[CustomToolParamFormatType.GRAMMAR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Grammar format. Always ``grammar``. Required.""" + syntax: Union[str, "_models.GrammarSyntax1"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The syntax of the grammar definition. One of ``lark`` or ``regex``. Required. Known values are: + \"lark\" and \"regex\".""" + definition: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The grammar definition. Required.""" @overload def __init__( self, *, - text: str, + syntax: Union[str, "_models.GrammarSyntax1"], + definition: str, ) -> None: ... @overload @@ -3202,20 +3475,18 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.TYPE # type: ignore + self.type = CustomToolParamFormatType.GRAMMAR # type: ignore -class ComputerActionWait(ComputerAction, discriminator="wait"): - """A wait action. +class CustomTextFormatParam(CustomToolParamFormat, discriminator="text"): + """Text format. - :ivar type: Specifies the event type. For a wait action, this property is - always set to ``wait``. Required. - :vartype type: str or ~azure.ai.projects.models.WAIT + :ivar type: Unconstrained text format. Always ``text``. Required. + :vartype type: str or ~azure.ai.projects.models.TEXT """ - type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Specifies the event type. For a wait action, this property is - always set to ``wait``. 
Required.""" + type: Literal[CustomToolParamFormatType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Unconstrained text format. Always ``text``. Required.""" @overload def __init__( @@ -3231,42 +3502,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerActionType.WAIT # type: ignore + self.type = CustomToolParamFormatType.TEXT # type: ignore -class ComputerToolCallItemParam(ItemParam, discriminator="computer_call"): - """A tool call to a computer use tool. See the - `computer use guide `_ for more - information. +class CustomToolParam(Tool, discriminator="custom"): + """Custom tool. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL - :ivar call_id: An identifier used when responding to the tool call with output. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.ComputerAction - :ivar pending_safety_checks: The pending safety checks for the computer call. Required. - :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + :ivar type: The type of the custom tool. Always ``custom``. Required. + :vartype type: str or ~azure.ai.projects.models.CUSTOM + :ivar name: The name of the custom tool, used to identify it in tool calls. Required. + :vartype name: str + :ivar description: Optional description of the custom tool, used to provide more context. + :vartype description: str + :ivar format: The input format for the custom tool. Default is unconstrained text. 
+ :vartype format: ~azure.ai.projects.models.CustomToolParamFormat """ - type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An identifier used when responding to the tool call with output. Required.""" - action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( + type: Literal[ToolType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the custom tool. Always ``custom``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the custom tool, used to identify it in tool calls. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional description of the custom tool, used to provide more context.""" + format: Optional["_models.CustomToolParamFormat"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The pending safety checks for the computer call. Required.""" + """The input format for the custom tool. Default is unconstrained text.""" @overload def __init__( self, *, - call_id: str, - action: "_models.ComputerAction", - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], + name: str, + description: Optional[str] = None, + format: Optional["_models.CustomToolParamFormat"] = None, ) -> None: ... 
@overload @@ -3278,59 +3547,31 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL # type: ignore + self.type = ToolType.CUSTOM # type: ignore -class ComputerToolCallItemResource(ItemResource, discriminator="computer_call"): - """A tool call to a computer use tool. See the - `computer use guide `_ for more - information. +class RecurrenceSchedule(_Model): + """Recurrence schedule model. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: An identifier used when responding to the tool call with output. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.ComputerAction - :ivar pending_safety_checks: The pending safety checks for the computer call. Required. - :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DailyRecurrenceSchedule, HourlyRecurrenceSchedule, MonthlyRecurrenceSchedule, + WeeklyRecurrenceSchedule + + :ivar type: Recurrence type for the recurrence schedule. Required. Known values are: "Hourly", + "Daily", "Weekly", and "Monthly". 
+ :vartype type: str or ~azure.ai.projects.models.RecurrenceType """ - type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An identifier used when responding to the tool call with output. Required.""" - action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The pending safety checks for the computer call. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Recurrence type for the recurrence schedule. Required. Known values are: \"Hourly\", \"Daily\", + \"Weekly\", and \"Monthly\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - action: "_models.ComputerAction", - pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], - created_by: Optional["_models.CreatedBy"] = None, + type: str, ) -> None: ... 
@overload @@ -3342,28 +3583,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL # type: ignore -class ComputerToolCallOutputItemOutput(_Model): - """ComputerToolCallOutputItemOutput. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ComputerToolCallOutputItemOutputComputerScreenshot +class DailyRecurrenceSchedule(RecurrenceSchedule, discriminator="Daily"): + """Daily recurrence schedule. - :ivar type: Required. "computer_screenshot" - :vartype type: str or ~azure.ai.projects.models.ComputerToolCallOutputItemOutputType + :ivar type: Daily recurrence type. Required. Daily recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.DAILY + :ivar hours: Hours for the recurrence schedule. Required. + :vartype hours: list[int] """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. \"computer_screenshot\"""" + type: Literal[RecurrenceType.DAILY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Daily recurrence type. Required. Daily recurrence pattern.""" + hours: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Hours for the recurrence schedule. Required.""" @overload def __init__( self, *, - type: str, + hours: list[int], ) -> None: ... 
@overload @@ -3375,32 +3615,26 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = RecurrenceType.DAILY # type: ignore -class ComputerToolCallOutputItemOutputComputerScreenshot( - ComputerToolCallOutputItemOutput, discriminator="computer_screenshot" -): # pylint: disable=name-too-long - """ComputerToolCallOutputItemOutputComputerScreenshot. +class DatasetCredential(_Model): + """Represents a reference to a blob for consumption. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.SCREENSHOT - :ivar image_url: - :vartype image_url: str - :ivar file_id: - :vartype file_id: str + :ivar blob_reference: Credential info to access the storage account. Required. + :vartype blob_reference: ~azure.ai.projects.models.BlobReference """ - type: Literal[ComputerToolCallOutputItemOutputType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Credential info to access the storage account. Required.""" @overload def __init__( self, *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, + blob_reference: "_models.BlobReference", ) -> None: ... 
@overload @@ -3412,46 +3646,69 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ComputerToolCallOutputItemOutputType.SCREENSHOT # type: ignore -class ComputerToolCallOutputItemParam(ItemParam, discriminator="computer_call_output"): - """The output of a computer tool call. +class DatasetVersion(_Model): + """DatasetVersion Definition. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT - :ivar call_id: The ID of the computer tool call that produced the output. Required. - :vartype call_id: str - :ivar acknowledged_safety_checks: The safety checks reported by the API that have been - acknowledged by the - developer. - :vartype acknowledged_safety_checks: - list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - :ivar output: Required. - :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FileDatasetVersion, FolderDatasetVersion + + :ivar data_uri: URI of the data (`example `_). + Required. + :vartype data_uri: str + :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". + :vartype type: str or ~azure.ai.projects.models.DatasetType + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. 
+ :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] """ - type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call that produced the output. Required.""" - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The safety checks reported by the API that have been acknowledged by the - developer.""" - output: "_models.ComputerToolCallOutputItemOutput" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" + __mapping__: dict[str, _Model] = {} + data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) + """URI of the data (`example `_). Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" + is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) + """Indicates if the dataset holds a reference to the storage, or the dataset manages storage + itself. If true, the underlying data will not be deleted when the dataset version is deleted.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read", "create"]) + """The Azure Storage Account connection name. 
Required if startPendingUploadVersion was not called + before creating the Dataset.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" @overload def __init__( self, *, - call_id: str, - output: "_models.ComputerToolCallOutputItemOutput", - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + data_uri: str, + type: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -3463,63 +3720,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore -class ComputerToolCallOutputItemResource(ItemResource, discriminator="computer_call_output"): - """The output of a computer tool call. +class DeleteAgentResponse(_Model): + """A deleted agent Object. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. 
Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The ID of the computer tool call that produced the output. Required. - :vartype call_id: str - :ivar acknowledged_safety_checks: The safety checks reported by the API that have been - acknowledged by the - developer. - :vartype acknowledged_safety_checks: - list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] - :ivar output: Required. - :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + :ivar object: The object type. Always 'agent.deleted'. Required. Default value is + "agent.deleted". + :vartype object: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar deleted: Whether the agent was successfully deleted. Required. + :vartype deleted: bool """ - type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the computer tool call that produced the output. 
Required.""" - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The safety checks reported by the API that have been acknowledged by the - developer.""" - output: "_models.ComputerToolCallOutputItemOutput" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" + object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the agent was successfully deleted. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - output: "_models.ComputerToolCallOutputItemOutput", - created_by: Optional["_models.CreatedBy"] = None, - acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + name: str, + deleted: bool, ) -> None: ... @overload @@ -3531,34 +3758,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore + self.object: Literal["agent.deleted"] = "agent.deleted" -class ComputerToolCallSafetyCheck(_Model): - """A pending safety check for the computer call. +class DeleteAgentVersionResponse(_Model): + """A deleted agent version Object. - :ivar id: The ID of the pending safety check. Required. - :vartype id: str - :ivar code: The type of the pending safety check. Required. - :vartype code: str - :ivar message: Details about the pending safety check. Required. 
- :vartype message: str + :ivar object: The object type. Always 'agent.deleted'. Required. Default value is + "agent.version.deleted". + :vartype object: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. Required. + :vartype version: str + :ivar deleted: Whether the agent was successfully deleted. Required. + :vartype deleted: bool """ - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the pending safety check. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The type of the pending safety check. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Details about the pending safety check. Required.""" + object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'agent.deleted'. Required. Default value is \"agent.version.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the agent was successfully deleted. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - code: str, - message: str, + name: str, + version: str, + deleted: bool, ) -> None: ... 
@overload @@ -3570,44 +3802,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.object: Literal["agent.version.deleted"] = "agent.version.deleted" -class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): - """A tool that controls a virtual computer. Learn more about the `computer tool - `_. - - :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW - :ivar environment: The type of computer environment to control. Required. Is one of the - following types: Literal["windows"], Literal["mac"], Literal["linux"], Literal["ubuntu"], - Literal["browser"] - :vartype environment: str or str or str or str or str - :ivar display_width: The width of the computer display. Required. - :vartype display_width: int - :ivar display_height: The height of the computer display. Required. - :vartype display_height: int +class DeleteMemoryStoreResult(_Model): + """DeleteMemoryStoreResult. + + :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is + "memory_store.deleted". + :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar deleted: Whether the memory store was successfully deleted. Required. + :vartype deleted: bool """ - type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the computer use tool. Always ``computer_use_preview``. Required.""" - environment: Literal["windows", "mac", "linux", "ubuntu", "browser"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The type of computer environment to control. Required. 
Is one of the following types: - Literal[\"windows\"], Literal[\"mac\"], Literal[\"linux\"], Literal[\"ubuntu\"], - Literal[\"browser\"]""" - display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The width of the computer display. Required.""" - display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The height of the computer display. Required.""" + object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'memory_store.deleted'. Required. Default value is + \"memory_store.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the memory store was successfully deleted. Required.""" @overload def __init__( self, *, - environment: Literal["windows", "mac", "linux", "ubuntu", "browser"], - display_width: int, - display_height: int, + name: str, + deleted: bool, ) -> None: ... @overload @@ -3619,90 +3842,32 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.COMPUTER_USE_PREVIEW # type: ignore + self.object: Literal["memory_store.deleted"] = "memory_store.deleted" -class Connection(_Model): - """Response from the list and get connections operations. +class Deployment(_Model): + """Model Deployment Definition. - :ivar name: The friendly name of the connection, provided by the user. Required. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ModelDeployment + + :ivar type: The type of the deployment. Required. "ModelDeployment" + :vartype type: str or ~azure.ai.projects.models.DeploymentType + :ivar name: Name of the deployment. Required. 
:vartype name: str - :ivar id: A unique identifier for the connection, generated by the service. Required. - :vartype id: str - :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", - "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", - "CustomKeys", and "RemoteTool". - :vartype type: str or ~azure.ai.projects.models.ConnectionType - :ivar target: The connection URL to be used for this service. Required. - :vartype target: str - :ivar is_default: Whether the connection is tagged as the default connection of its type. - Required. - :vartype is_default: bool - :ivar credentials: The credentials used by the connection. Required. - :vartype credentials: ~azure.ai.projects.models.BaseCredentials - :ivar metadata: Metadata of the connection. Required. - :vartype metadata: dict[str, str] """ + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of the deployment. Required. \"ModelDeployment\"""" name: str = rest_field(visibility=["read"]) - """The friendly name of the connection, provided by the user. Required.""" - id: str = rest_field(visibility=["read"]) - """A unique identifier for the connection, generated by the service. Required.""" - type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) - """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", - \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", - \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" - target: str = rest_field(visibility=["read"]) - """The connection URL to be used for this service. Required.""" - is_default: bool = rest_field(name="isDefault", visibility=["read"]) - """Whether the connection is tagged as the default connection of its type. 
Required.""" - credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) - """The credentials used by the connection. Required.""" - metadata: dict[str, str] = rest_field(visibility=["read"]) - """Metadata of the connection. Required.""" - - -class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app"): - """The container app agent definition. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar container_app_resource_id: The resource ID of the Azure Container App that hosts this - agent. Not mutable across versions. Required. - :vartype container_app_resource_id: str - :ivar ingress_subdomain_suffix: The suffix to apply to the app subdomain when sending ingress - to the agent. This can be a label (e.g., '---current'), a specific revision (e.g., - '--0000001'), or empty to use the default endpoint for the container app. Required. - :vartype ingress_subdomain_suffix: str - """ - - kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The protocols that the agent supports for ingress communication of the containers. Required.""" - container_app_resource_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The resource ID of the Azure Container App that hosts this agent. Not mutable across versions. 
- Required.""" - ingress_subdomain_suffix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The suffix to apply to the app subdomain when sending ingress to the agent. This can be a label - (e.g., '---current'), a specific revision (e.g., '--0000001'), or empty to use the default - endpoint for the container app. Required.""" + """Name of the deployment. Required.""" @overload def __init__( self, *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - container_app_resource_id: str, - ingress_subdomain_suffix: str, - rai_config: Optional["_models.RaiConfig"] = None, + type: str, ) -> None: ... @overload @@ -3714,30 +3879,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.kind = AgentKind.CONTAINER_APP # type: ignore -class EvaluationRuleAction(_Model): - """Evaluation action model. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ContinuousEvaluationRuleAction, HumanEvaluationRuleAction +class DoubleClickAction(ComputerAction, discriminator="double_click"): + """DoubleClick. - :ivar type: Type of the evaluation action. Required. Known values are: "continuousEvaluation" - and "humanEvaluation". - :vartype type: str or ~azure.ai.projects.models.EvaluationRuleActionType + :ivar type: Specifies the event type. For a double click action, this property is always set to + ``double_click``. Required. + :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK + :ivar x: The x-coordinate where the double click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the double click occurred. Required. + :vartype y: int """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the evaluation action. Required. 
Known values are: \"continuousEvaluation\" and - \"humanEvaluation\".""" + type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a double click action, this property is always set to + ``double_click``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the double click occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the double click occurred. Required.""" @overload def __init__( self, *, - type: str, + x: int, + y: int, ) -> None: ... @overload @@ -3749,34 +3918,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ComputerActionType.DOUBLE_CLICK # type: ignore -class ContinuousEvaluationRuleAction(EvaluationRuleAction, discriminator="continuousEvaluation"): - """Evaluation rule action for continuous evaluation. +class Drag(ComputerAction, discriminator="drag"): + """Drag. - :ivar type: Required. Continuous evaluation. - :vartype type: str or ~azure.ai.projects.models.CONTINUOUS_EVALUATION - :ivar eval_id: Eval Id to add continuous evaluation runs to. Required. - :vartype eval_id: str - :ivar max_hourly_runs: Maximum number of evaluation runs allowed per hour. - :vartype max_hourly_runs: int + :ivar type: Specifies the event type. For a drag action, this property is + always set to ``drag``. Required. + :vartype type: str or ~azure.ai.projects.models.DRAG + :ivar path: An array of coordinates representing the path of the drag action. Coordinates will + appear as an array of objects, eg + + .. code-block:: + + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required. 
+ :vartype path: list[~azure.ai.projects.models.DragPoint] """ - type: Literal[EvaluationRuleActionType.CONTINUOUS_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Continuous evaluation.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Eval Id to add continuous evaluation runs to. Required.""" - max_hourly_runs: Optional[int] = rest_field( - name="maxHourlyRuns", visibility=["read", "create", "update", "delete", "query"] - ) - """Maximum number of evaluation runs allowed per hour.""" + type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a drag action, this property is + always set to ``drag``. Required.""" + path: list["_models.DragPoint"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of coordinates representing the path of the drag action. Coordinates will appear as an + array of objects, eg + + .. code-block:: + + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required.""" @overload def __init__( self, *, - eval_id: str, - max_hourly_runs: Optional[int] = None, + path: list["_models.DragPoint"], ) -> None: ... @overload @@ -3788,11 +3969,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = EvaluationRuleActionType.CONTINUOUS_EVALUATION # type: ignore + self.type = ComputerActionType.DRAG # type: ignore -class Coordinate(_Model): - """An x/y coordinate pair, e.g. ``{ x: 100, y: 200 }``. +class DragPoint(_Model): + """Coordinate. :ivar x: The x-coordinate. Required. 
:vartype x: int @@ -3824,59 +4005,47 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): - """CosmosDB Vector Store Index Definition. +class InputItem(_Model): + """An item representing part of the context for the response to be + generated by the model. Can contain text, images, and audio inputs, + as well as previous assistant responses and tool call outputs. - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Type of index. Required. CosmosDB - :vartype type: str or ~azure.ai.projects.models.COSMOS_DB - :ivar connection_name: Name of connection to CosmosDB. Required. - :vartype connection_name: str - :ivar database_name: Name of the CosmosDB Database. Required. - :vartype database_name: str - :ivar container_name: Name of CosmosDB Container. Required. - :vartype container_name: str - :ivar embedding_configuration: Embedding model configuration. Required. - :vartype embedding_configuration: ~azure.ai.projects.models.EmbeddingConfiguration - :ivar field_mapping: Field mapping configuration. Required. - :vartype field_mapping: ~azure.ai.projects.models.FieldMapping + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + InputItemApplyPatchToolCallItemParam, InputItemApplyPatchToolCallOutputItemParam, + InputItemCodeInterpreterToolCall, InputItemCompactionSummaryItemParam, + InputItemComputerToolCall, InputItemComputerCallOutputItemParam, InputItemCustomToolCall, + InputItemCustomToolCallOutput, InputItemFileSearchToolCall, InputItemFunctionToolCall, + InputItemFunctionCallOutputItemParam, InputItemImageGenToolCall, ItemReferenceParam, + InputItemLocalShellToolCall, InputItemLocalShellToolCallOutput, InputItemMcpApprovalRequest, + InputItemMcpApprovalResponse, InputItemMcpToolCall, InputItemMcpListTools, EasyInputMessage, + InputItemOutputMessage, InputItemReasoningItem, InputItemFunctionShellCallItemParam, + InputItemFunctionShellCallOutputItemParam, InputItemWebSearchToolCall + + :ivar type: Required. Known values are: "message", "output_message", "file_search_call", + "computer_call", "computer_call_output", "web_search_call", "function_call", + "function_call_output", "reasoning", "compaction", "image_generation_call", + "code_interpreter_call", "local_shell_call", "local_shell_call_output", "shell_call", + "shell_call_output", "apply_patch_call", "apply_patch_call_output", "mcp_list_tools", + "mcp_approval_request", "mcp_approval_response", "mcp_call", "custom_tool_call_output", + "custom_tool_call", and "item_reference". + :vartype type: str or ~azure.ai.projects.models.InputItemType """ - type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. CosmosDB""" - connection_name: str = rest_field(name="connectionName", visibility=["create"]) - """Name of connection to CosmosDB. Required.""" - database_name: str = rest_field(name="databaseName", visibility=["create"]) - """Name of the CosmosDB Database. Required.""" - container_name: str = rest_field(name="containerName", visibility=["create"]) - """Name of CosmosDB Container. 
Required.""" - embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( - name="embeddingConfiguration", visibility=["create"] - ) - """Embedding model configuration. Required.""" - field_mapping: "_models.FieldMapping" = rest_field(name="fieldMapping", visibility=["create"]) - """Field mapping configuration. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"message\", \"output_message\", \"file_search_call\", + \"computer_call\", \"computer_call_output\", \"web_search_call\", \"function_call\", + \"function_call_output\", \"reasoning\", \"compaction\", \"image_generation_call\", + \"code_interpreter_call\", \"local_shell_call\", \"local_shell_call_output\", \"shell_call\", + \"shell_call_output\", \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"custom_tool_call_output\", + \"custom_tool_call\", and \"item_reference\".""" @overload def __init__( self, *, - connection_name: str, - database_name: str, - container_name: str, - embedding_configuration: "_models.EmbeddingConfiguration", - field_mapping: "_models.FieldMapping", - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, + type: str, ) -> None: ... @overload @@ -3888,29 +4057,55 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = IndexType.COSMOS_DB # type: ignore -class CreatedBy(_Model): - """CreatedBy. +class EasyInputMessage(InputItem, discriminator="message"): + """Input message. - :ivar agent: The agent that created the item. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar response_id: The response on which the item is created. - :vartype response_id: str + :ivar role: The role of the message input. 
One of ``user``, ``assistant``, ``system``, or + ``developer``. Required. Is one of the following types: Literal["user"], + Literal["assistant"], Literal["system"], Literal["developer"] + :vartype role: str or str or str or str + :ivar content: Text, image, or audio input to the model, used to generate a response. + Can also contain previous assistant responses. Required. Is either a str type or a + [InputContent] type. + :vartype content: str or list[~azure.ai.projects.models.InputContent] + :ivar type: The type of the message input. Always ``message``. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The agent that created the item.""" - response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response on which the item is created.""" - - @overload - def __init__( - self, + role: Literal["user", "assistant", "system", "developer"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The role of the message input. One of ``user``, ``assistant``, ``system``, or + ``developer``. Required. Is one of the following types: Literal[\"user\"], + Literal[\"assistant\"], Literal[\"system\"], Literal[\"developer\"]""" + content: Union[str, list["_models.InputContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Text, image, or audio input to the model, used to generate a response. + Can also contain previous assistant responses. Required. 
Is either a str type or a + [InputContent] type.""" + type: Literal[InputItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the message input. Always ``message``. Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + + @overload + def __init__( + self, *, - agent: Optional["_models.AgentId"] = None, - response_id: Optional[str] = None, + role: Literal["user", "assistant", "system", "developer"], + content: Union[str, list["_models.InputContent"]], + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -3922,28 +4117,31 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.MESSAGE # type: ignore -class Trigger(_Model): - """Base model for Trigger of the schedule. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CronTrigger, OneTimeTrigger, RecurrenceTrigger +class EmbeddingConfiguration(_Model): + """Embedding configuration class. - :ivar type: Type of the trigger. Required. Known values are: "Cron", "Recurrence", and - "OneTime". - :vartype type: str or ~azure.ai.projects.models.TriggerType + :ivar model_deployment_name: Deployment name of embedding model. It can point to a model + deployment either in the parent AIServices or a connection. Required. + :vartype model_deployment_name: str + :ivar embedding_field: Embedding field. Required. 
+ :vartype embedding_field: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the trigger. Required. Known values are: \"Cron\", \"Recurrence\", and \"OneTime\".""" + model_deployment_name: str = rest_field(name="modelDeploymentName", visibility=["create"]) + """Deployment name of embedding model. It can point to a model deployment either in the parent + AIServices or a connection. Required.""" + embedding_field: str = rest_field(name="embeddingField", visibility=["create"]) + """Embedding field. Required.""" @overload def __init__( self, *, - type: str, + model_deployment_name: str, + embedding_field: str, ) -> None: ... @overload @@ -3957,40 +4155,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CronTrigger(Trigger, discriminator="Cron"): - """Cron based trigger. +class EntraIDCredentials(BaseCredentials, discriminator="AAD"): + """Entra ID credential definition. - :ivar type: Required. Cron based trigger. - :vartype type: str or ~azure.ai.projects.models.CRON - :ivar expression: Cron expression that defines the schedule frequency. Required. - :vartype expression: str - :ivar time_zone: Time zone for the cron schedule. - :vartype time_zone: str - :ivar start_time: Start time for the cron schedule in ISO 8601 format. - :vartype start_time: str - :ivar end_time: End time for the cron schedule in ISO 8601 format. - :vartype end_time: str + :ivar type: The credential type. Required. Entra ID credential (formerly known as AAD) + :vartype type: str or ~azure.ai.projects.models.ENTRA_ID """ - type: Literal[TriggerType.CRON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. 
Cron based trigger.""" - expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Cron expression that defines the schedule frequency. Required.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the cron schedule.""" - start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) - """Start time for the cron schedule in ISO 8601 format.""" - end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) - """End time for the cron schedule in ISO 8601 format.""" + type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Entra ID credential (formerly known as AAD)""" @overload def __init__( self, - *, - expression: str, - time_zone: Optional[str] = None, - start_time: Optional[str] = None, - end_time: Optional[str] = None, ) -> None: ... @overload @@ -4002,22 +4179,55 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = TriggerType.CRON # type: ignore + self.type = CredentialType.ENTRA_ID # type: ignore -class CustomCredential(BaseCredentials, discriminator="CustomKeys"): - """Custom credential definition. +class Error(_Model): + """Error. - :ivar type: The credential type. Required. Custom credential - :vartype type: str or ~azure.ai.projects.models.CUSTOM + :ivar code: Required. + :vartype code: str + :ivar message: Required. + :vartype message: str + :ivar param: Required. + :vartype param: str + :ivar type: Required. 
+ :vartype type: str + :ivar details: + :vartype details: list[~azure.ai.projects.models.Error] + :ivar additional_info: + :vartype additional_info: dict[str, any] + :ivar debug_info: + :vartype debug_info: dict[str, any] """ - type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Custom credential""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + additional_info: Optional[dict[str, Any]] = rest_field( + name="additionalInfo", visibility=["read", "create", "update", "delete", "query"] + ) + debug_info: Optional[dict[str, Any]] = rest_field( + name="debugInfo", visibility=["read", "create", "update", "delete", "query"] + ) @overload def __init__( self, + *, + code: str, + message: str, + param: str, + type: str, + details: Optional[list["_models.Error"]] = None, + additional_info: Optional[dict[str, Any]] = None, + debug_info: Optional[dict[str, Any]] = None, ) -> None: ... @overload @@ -4029,31 +4239,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = CredentialType.CUSTOM # type: ignore - -class RecurrenceSchedule(_Model): - """Recurrence schedule model. - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - DailyRecurrenceSchedule, HourlyRecurrenceSchedule, MonthlyRecurrenceSchedule, - WeeklyRecurrenceSchedule +class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): + """Insights from the evaluation comparison. - :ivar type: Recurrence type for the recurrence schedule. Required. Known values are: "Hourly", - "Daily", "Weekly", and "Monthly". - :vartype type: str or ~azure.ai.projects.models.RecurrenceType + :ivar type: The type of insights result. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar comparisons: Comparison results for each treatment run against the baseline. Required. + :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] + :ivar method: The statistical method used for comparison. Required. + :vartype method: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Recurrence type for the recurrence schedule. Required. Known values are: \"Hourly\", \"Daily\", - \"Weekly\", and \"Monthly\".""" + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Evaluation Comparison.""" + comparisons: list["_models.EvalRunResultComparison"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Comparison results for each treatment run against the baseline. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The statistical method used for comparison. Required.""" @overload def __init__( self, *, - type: str, + comparisons: list["_models.EvalRunResultComparison"], + method: str, ) -> None: ... 
@overload @@ -4065,27 +4278,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore -class DailyRecurrenceSchedule(RecurrenceSchedule, discriminator="Daily"): - """Daily recurrence schedule. +class EvalResult(_Model): + """Result of the evaluation. - :ivar type: Daily recurrence type. Required. Daily recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.DAILY - :ivar hours: Hours for the recurrence schedule. Required. - :vartype hours: list[int] + :ivar name: name of the check. Required. + :vartype name: str + :ivar type: type of the check. Required. + :vartype type: str + :ivar score: score. Required. + :vartype score: float + :ivar passed: indicates if the check passed or failed. Required. + :vartype passed: bool """ - type: Literal[RecurrenceType.DAILY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Daily recurrence type. Required. Daily recurrence pattern.""" - hours: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Hours for the recurrence schedule. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """name of the check. Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """type of the check. Required.""" + score: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """score. Required.""" + passed: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """indicates if the check passed or failed. Required.""" @overload def __init__( self, *, - hours: list[int], + name: str, + type: str, + score: float, + passed: bool, ) -> None: ... 
@overload @@ -4097,26 +4322,51 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = RecurrenceType.DAILY # type: ignore -class DatasetCredential(_Model): - """Represents a reference to a blob for consumption. +class EvalRunResultCompareItem(_Model): + """Metric comparison for a treatment against the baseline. - :ivar blob_reference: Credential info to access the storage account. Required. - :vartype blob_reference: ~azure.ai.projects.models.BlobReference + :ivar treatment_run_id: The treatment run ID. Required. + :vartype treatment_run_id: str + :ivar treatment_run_summary: Summary statistics of the treatment run. Required. + :vartype treatment_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar delta_estimate: Estimated difference between treatment and baseline. Required. + :vartype delta_estimate: float + :ivar p_value: P-value for the treatment effect. Required. + :vartype p_value: float + :ivar treatment_effect: Type of treatment effect. Required. Known values are: "TooFewSamples", + "Inconclusive", "Changed", "Improved", and "Degraded". + :vartype treatment_effect: str or ~azure.ai.projects.models.TreatmentEffectType """ - blob_reference: "_models.BlobReference" = rest_field( - name="blobReference", visibility=["read", "create", "update", "delete", "query"] + treatment_run_id: str = rest_field( + name="treatmentRunId", visibility=["read", "create", "update", "delete", "query"] ) - """Credential info to access the storage account. Required.""" + """The treatment run ID. Required.""" + treatment_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="treatmentRunSummary", visibility=["read", "create", "update", "delete", "query"] + ) + """Summary statistics of the treatment run. 
Required.""" + delta_estimate: float = rest_field(name="deltaEstimate", visibility=["read", "create", "update", "delete", "query"]) + """Estimated difference between treatment and baseline. Required.""" + p_value: float = rest_field(name="pValue", visibility=["read", "create", "update", "delete", "query"]) + """P-value for the treatment effect. Required.""" + treatment_effect: Union[str, "_models.TreatmentEffectType"] = rest_field( + name="treatmentEffect", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of treatment effect. Required. Known values are: \"TooFewSamples\", \"Inconclusive\", + \"Changed\", \"Improved\", and \"Degraded\".""" @overload def __init__( self, *, - blob_reference: "_models.BlobReference", + treatment_run_id: str, + treatment_run_summary: "_models.EvalRunResultSummary", + delta_estimate: float, + p_value: float, + treatment_effect: Union[str, "_models.TreatmentEffectType"], ) -> None: ... @overload @@ -4130,2116 +4380,47 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DatasetVersion(_Model): - """DatasetVersion Definition. +class EvalRunResultComparison(_Model): + """Comparison results for treatment runs against the baseline. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - FileDatasetVersion, FolderDatasetVersion + :ivar testing_criteria: Name of the testing criteria. Required. + :vartype testing_criteria: str + :ivar metric: Metric being evaluated. Required. + :vartype metric: str + :ivar evaluator: Name of the evaluator for this testing criteria. Required. + :vartype evaluator: str + :ivar baseline_run_summary: Summary statistics of the baseline run. Required. + :vartype baseline_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar compare_items: List of comparison results for each treatment run. Required. 
+ :vartype compare_items: list[~azure.ai.projects.models.EvalRunResultCompareItem] + """ - :ivar data_uri: URI of the data (`example `_). - Required. - :vartype data_uri: str - :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". - :vartype type: str or ~azure.ai.projects.models.DatasetType - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) - """URI of the data (`example `_). Required.""" - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" - is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) - """Indicates if the dataset holds a reference to the storage, or the dataset manages storage - itself. If true, the underlying data will not be deleted when the dataset version is deleted.""" - connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read", "create"]) - """The Azure Storage Account connection name. 
Required if startPendingUploadVersion was not called - before creating the Dataset.""" - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. Tags can be added, removed, and updated.""" - - @overload - def __init__( - self, - *, - data_uri: str, - type: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DeleteAgentResponse(_Model): - """A deleted agent Object. - - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.deleted". - :vartype object: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar deleted: Whether the agent was successfully deleted. Required. - :vartype deleted: bool - """ - - object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the agent was successfully deleted. 
Required.""" - - @overload - def __init__( - self, - *, - name: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.deleted"] = "agent.deleted" - - -class DeleteAgentVersionResponse(_Model): - """A deleted agent version Object. - - :ivar object: The object type. Always 'agent.deleted'. Required. Default value is - "agent.version.deleted". - :vartype object: str - :ivar name: The name of the agent. Required. - :vartype name: str - :ivar version: The version identifier of the agent. Required. - :vartype version: str - :ivar deleted: Whether the agent was successfully deleted. Required. - :vartype deleted: bool - """ - - object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'agent.deleted'. Required. Default value is \"agent.version.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the agent. Required.""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version identifier of the agent. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the agent was successfully deleted. Required.""" - - @overload - def __init__( - self, - *, - name: str, - version: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["agent.version.deleted"] = "agent.version.deleted" - - -class DeleteMemoryStoreResult(_Model): - """DeleteMemoryStoreResult. - - :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is - "memory_store.deleted". - :vartype object: str - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar deleted: Whether the memory store was successfully deleted. Required. - :vartype deleted: bool - """ - - object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type. Always 'memory_store.deleted'. Required. Default value is - \"memory_store.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the memory store was successfully deleted. Required.""" - - @overload - def __init__( - self, - *, - name: str, - deleted: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.object: Literal["memory_store.deleted"] = "memory_store.deleted" - - -class Deployment(_Model): - """Model Deployment Definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ModelDeployment - - :ivar type: The type of the deployment. Required. "ModelDeployment" - :vartype type: str or ~azure.ai.projects.models.DeploymentType - :ivar name: Name of the deployment. Required. 
- :vartype name: str - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of the deployment. Required. \"ModelDeployment\"""" - name: str = rest_field(visibility=["read"]) - """Name of the deployment. Required.""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EmbeddingConfiguration(_Model): - """Embedding configuration class. - - :ivar model_deployment_name: Deployment name of embedding model. It can point to a model - deployment either in the parent AIServices or a connection. Required. - :vartype model_deployment_name: str - :ivar embedding_field: Embedding field. Required. - :vartype embedding_field: str - """ - - model_deployment_name: str = rest_field(name="modelDeploymentName", visibility=["create"]) - """Deployment name of embedding model. It can point to a model deployment either in the parent - AIServices or a connection. Required.""" - embedding_field: str = rest_field(name="embeddingField", visibility=["create"]) - """Embedding field. Required.""" - - @overload - def __init__( - self, - *, - model_deployment_name: str, - embedding_field: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EntraIDCredentials(BaseCredentials, discriminator="AAD"): - """Entra ID credential definition. - - :ivar type: The credential type. Required. 
Entra ID credential (formerly known as AAD) - :vartype type: str or ~azure.ai.projects.models.ENTRA_ID - """ - - type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. Entra ID credential (formerly known as AAD)""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = CredentialType.ENTRA_ID # type: ignore - - -class Error(_Model): - """Error. - - :ivar code: Required. - :vartype code: str - :ivar message: Required. - :vartype message: str - :ivar param: Required. - :vartype param: str - :ivar type: Required. - :vartype type: str - :ivar details: - :vartype details: list[~azure.ai.projects.models.Error] - :ivar additional_info: - :vartype additional_info: dict[str, any] - :ivar debug_info: - :vartype debug_info: dict[str, any] - """ - - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - additional_info: Optional[dict[str, Any]] = rest_field( - name="additionalInfo", visibility=["read", "create", "update", "delete", "query"] - ) - debug_info: Optional[dict[str, Any]] = rest_field( - name="debugInfo", visibility=["read", "create", "update", "delete", "query"] - ) - - @overload - def __init__( - self, - *, - code: str, - message: 
str, - param: str, - type: str, - details: Optional[list["_models.Error"]] = None, - additional_info: Optional[dict[str, Any]] = None, - debug_info: Optional[dict[str, Any]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): - """Insights from the evaluation comparison. - - :ivar type: The type of insights result. Required. Evaluation Comparison. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON - :ivar comparisons: Comparison results for each treatment run against the baseline. Required. - :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] - :ivar method: The statistical method used for comparison. Required. - :vartype method: str - """ - - type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. Evaluation Comparison.""" - comparisons: list["_models.EvalRunResultComparison"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Comparison results for each treatment run against the baseline. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The statistical method used for comparison. Required.""" - - @overload - def __init__( - self, - *, - comparisons: list["_models.EvalRunResultComparison"], - method: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_COMPARISON # type: ignore - - -class EvalResult(_Model): - """Result of the evaluation. - - :ivar name: name of the check. Required. - :vartype name: str - :ivar type: type of the check. Required. - :vartype type: str - :ivar score: score. Required. - :vartype score: float - :ivar passed: indicates if the check passed or failed. Required. - :vartype passed: bool - """ - - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """name of the check. Required.""" - type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """type of the check. Required.""" - score: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """score. Required.""" - passed: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """indicates if the check passed or failed. Required.""" - - @overload - def __init__( - self, - *, - name: str, - type: str, - score: float, - passed: bool, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultCompareItem(_Model): - """Metric comparison for a treatment against the baseline. - - :ivar treatment_run_id: The treatment run ID. Required. - :vartype treatment_run_id: str - :ivar treatment_run_summary: Summary statistics of the treatment run. Required. - :vartype treatment_run_summary: ~azure.ai.projects.models.EvalRunResultSummary - :ivar delta_estimate: Estimated difference between treatment and baseline. Required. - :vartype delta_estimate: float - :ivar p_value: P-value for the treatment effect. Required. 
- :vartype p_value: float - :ivar treatment_effect: Type of treatment effect. Required. Known values are: "TooFewSamples", - "Inconclusive", "Changed", "Improved", and "Degraded". - :vartype treatment_effect: str or ~azure.ai.projects.models.TreatmentEffectType - """ - - treatment_run_id: str = rest_field( - name="treatmentRunId", visibility=["read", "create", "update", "delete", "query"] - ) - """The treatment run ID. Required.""" - treatment_run_summary: "_models.EvalRunResultSummary" = rest_field( - name="treatmentRunSummary", visibility=["read", "create", "update", "delete", "query"] - ) - """Summary statistics of the treatment run. Required.""" - delta_estimate: float = rest_field(name="deltaEstimate", visibility=["read", "create", "update", "delete", "query"]) - """Estimated difference between treatment and baseline. Required.""" - p_value: float = rest_field(name="pValue", visibility=["read", "create", "update", "delete", "query"]) - """P-value for the treatment effect. Required.""" - treatment_effect: Union[str, "_models.TreatmentEffectType"] = rest_field( - name="treatmentEffect", visibility=["read", "create", "update", "delete", "query"] - ) - """Type of treatment effect. Required. Known values are: \"TooFewSamples\", \"Inconclusive\", - \"Changed\", \"Improved\", and \"Degraded\".""" - - @overload - def __init__( - self, - *, - treatment_run_id: str, - treatment_run_summary: "_models.EvalRunResultSummary", - delta_estimate: float, - p_value: float, - treatment_effect: Union[str, "_models.TreatmentEffectType"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultComparison(_Model): - """Comparison results for treatment runs against the baseline. - - :ivar testing_criteria: Name of the testing criteria. 
Required. - :vartype testing_criteria: str - :ivar metric: Metric being evaluated. Required. - :vartype metric: str - :ivar evaluator: Name of the evaluator for this testing criteria. Required. - :vartype evaluator: str - :ivar baseline_run_summary: Summary statistics of the baseline run. Required. - :vartype baseline_run_summary: ~azure.ai.projects.models.EvalRunResultSummary - :ivar compare_items: List of comparison results for each treatment run. Required. - :vartype compare_items: list[~azure.ai.projects.models.EvalRunResultCompareItem] - """ - - testing_criteria: str = rest_field( - name="testingCriteria", visibility=["read", "create", "update", "delete", "query"] - ) - """Name of the testing criteria. Required.""" - metric: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Metric being evaluated. Required.""" - evaluator: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the evaluator for this testing criteria. Required.""" - baseline_run_summary: "_models.EvalRunResultSummary" = rest_field( - name="baselineRunSummary", visibility=["read", "create", "update", "delete", "query"] - ) - """Summary statistics of the baseline run. Required.""" - compare_items: list["_models.EvalRunResultCompareItem"] = rest_field( - name="compareItems", visibility=["read", "create", "update", "delete", "query"] - ) - """List of comparison results for each treatment run. Required.""" - - @overload - def __init__( - self, - *, - testing_criteria: str, - metric: str, - evaluator: str, - baseline_run_summary: "_models.EvalRunResultSummary", - compare_items: list["_models.EvalRunResultCompareItem"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvalRunResultSummary(_Model): - """Summary statistics of a metric in an evaluation run. - - :ivar run_id: The evaluation run ID. Required. - :vartype run_id: str - :ivar sample_count: Number of samples in the evaluation run. Required. - :vartype sample_count: int - :ivar average: Average value of the metric in the evaluation run. Required. - :vartype average: float - :ivar standard_deviation: Standard deviation of the metric in the evaluation run. Required. - :vartype standard_deviation: float - """ - - run_id: str = rest_field(name="runId", visibility=["read", "create", "update", "delete", "query"]) - """The evaluation run ID. Required.""" - sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) - """Number of samples in the evaluation run. Required.""" - average: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Average value of the metric in the evaluation run. Required.""" - standard_deviation: float = rest_field( - name="standardDeviation", visibility=["read", "create", "update", "delete", "query"] - ) - """Standard deviation of the metric in the evaluation run. Required.""" - - @overload - def __init__( - self, - *, - run_id: str, - sample_count: int, - average: float, - standard_deviation: float, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): - """Evaluation Comparison Request. - - :ivar type: The type of request. Required. Evaluation Comparison. 
- :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON - :ivar eval_id: Identifier for the evaluation. Required. - :vartype eval_id: str - :ivar baseline_run_id: The baseline run ID for comparison. Required. - :vartype baseline_run_id: str - :ivar treatment_run_ids: List of treatment run IDs for comparison. Required. - :vartype treatment_run_ids: list[str] - """ - - type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of request. Required. Evaluation Comparison.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Identifier for the evaluation. Required.""" - baseline_run_id: str = rest_field(name="baselineRunId", visibility=["read", "create", "update", "delete", "query"]) - """The baseline run ID for comparison. Required.""" - treatment_run_ids: list[str] = rest_field( - name="treatmentRunIds", visibility=["read", "create", "update", "delete", "query"] - ) - """List of treatment run IDs for comparison. Required.""" - - @overload - def __init__( - self, - *, - eval_id: str, - baseline_run_id: str, - treatment_run_ids: list[str], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_COMPARISON # type: ignore - - -class InsightSample(_Model): - """A sample from the analysis. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EvaluationResultSample - - :ivar id: The unique identifier for the analysis sample. Required. - :vartype id: str - :ivar type: Sample type. Required. 
"EvaluationResultSample" - :vartype type: str or ~azure.ai.projects.models.SampleType - :ivar features: Features to help with additional filtering of data in UX. Required. - :vartype features: dict[str, any] - :ivar correlation_info: Info about the correlation for the analysis sample. Required. - :vartype correlation_info: dict[str, any] - """ - - __mapping__: dict[str, _Model] = {} - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier for the analysis sample. Required.""" - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Sample type. Required. \"EvaluationResultSample\"""" - features: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Features to help with additional filtering of data in UX. Required.""" - correlation_info: dict[str, Any] = rest_field( - name="correlationInfo", visibility=["read", "create", "update", "delete", "query"] - ) - """Info about the correlation for the analysis sample. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - type: str, - features: dict[str, Any], - correlation_info: dict[str, Any], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationResultSample(InsightSample, discriminator="EvaluationResultSample"): - """A sample from the evaluation result. - - :ivar id: The unique identifier for the analysis sample. Required. - :vartype id: str - :ivar features: Features to help with additional filtering of data in UX. Required. - :vartype features: dict[str, any] - :ivar correlation_info: Info about the correlation for the analysis sample. Required. 
- :vartype correlation_info: dict[str, any] - :ivar type: Evaluation Result Sample Type. Required. A sample from the evaluation result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RESULT_SAMPLE - :ivar evaluation_result: Evaluation result for the analysis sample. Required. - :vartype evaluation_result: ~azure.ai.projects.models.EvalResult - """ - - type: Literal[SampleType.EVALUATION_RESULT_SAMPLE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Evaluation Result Sample Type. Required. A sample from the evaluation result.""" - evaluation_result: "_models.EvalResult" = rest_field( - name="evaluationResult", visibility=["read", "create", "update", "delete", "query"] - ) - """Evaluation result for the analysis sample. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - features: dict[str, Any], - correlation_info: dict[str, Any], - evaluation_result: "_models.EvalResult", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = SampleType.EVALUATION_RESULT_SAMPLE # type: ignore - - -class EvaluationRule(_Model): - """Evaluation rule model. - - :ivar id: Unique identifier for the evaluation rule. Required. - :vartype id: str - :ivar display_name: Display Name for the evaluation rule. - :vartype display_name: str - :ivar description: Description for the evaluation rule. - :vartype description: str - :ivar action: Definition of the evaluation rule action. Required. - :vartype action: ~azure.ai.projects.models.EvaluationRuleAction - :ivar filter: Filter condition of the evaluation rule. 
- :vartype filter: ~azure.ai.projects.models.EvaluationRuleFilter - :ivar event_type: Event type that the evaluation rule applies to. Required. Known values are: - "responseCompleted" and "manual". - :vartype event_type: str or ~azure.ai.projects.models.EvaluationRuleEventType - :ivar enabled: Indicates whether the evaluation rule is enabled. Default is true. Required. - :vartype enabled: bool - :ivar system_data: System metadata for the evaluation rule. Required. - :vartype system_data: dict[str, str] - """ - - id: str = rest_field(visibility=["read"]) - """Unique identifier for the evaluation rule. Required.""" - display_name: Optional[str] = rest_field( - name="displayName", visibility=["read", "create", "update", "delete", "query"] - ) - """Display Name for the evaluation rule.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description for the evaluation rule.""" - action: "_models.EvaluationRuleAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Definition of the evaluation rule action. Required.""" - filter: Optional["_models.EvaluationRuleFilter"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Filter condition of the evaluation rule.""" - event_type: Union[str, "_models.EvaluationRuleEventType"] = rest_field( - name="eventType", visibility=["read", "create", "update", "delete", "query"] - ) - """Event type that the evaluation rule applies to. Required. Known values are: - \"responseCompleted\" and \"manual\".""" - enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Indicates whether the evaluation rule is enabled. Default is true. Required.""" - system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) - """System metadata for the evaluation rule. 
Required.""" - - @overload - def __init__( - self, - *, - action: "_models.EvaluationRuleAction", - event_type: Union[str, "_models.EvaluationRuleEventType"], - enabled: bool, - display_name: Optional[str] = None, - description: Optional[str] = None, - filter: Optional["_models.EvaluationRuleFilter"] = None, # pylint: disable=redefined-builtin - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationRuleFilter(_Model): - """Evaluation filter model. - - :ivar agent_name: Filter by agent name. Required. - :vartype agent_name: str - """ - - agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) - """Filter by agent name. Required.""" - - @overload - def __init__( - self, - *, - agent_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): - """Insights from the evaluation run cluster analysis. - - :ivar type: The type of insights result. Required. Insights on an Evaluation run result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar cluster_insight: Required. - :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult - """ - - type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights result. Required. 
Insights on an Evaluation run result.""" - cluster_insight: "_models.ClusterInsightResult" = rest_field( - name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] - ) - """Required.""" - - @overload - def __init__( - self, - *, - cluster_insight: "_models.ClusterInsightResult", - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore - - -class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): - """Insights on set of Evaluation Results. - - :ivar type: The type of insights request. Required. Insights on an Evaluation run result. - :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT - :ivar eval_id: Evaluation Id for the insights. Required. - :vartype eval_id: str - :ivar run_ids: List of evaluation run IDs for the insights. Required. - :vartype run_ids: list[str] - :ivar model_configuration: Configuration of the model used in the insight generation. - :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration - """ - - type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of insights request. Required. Insights on an Evaluation run result.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Evaluation Id for the insights. Required.""" - run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) - """List of evaluation run IDs for the insights. 
Required.""" - model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( - name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] - ) - """Configuration of the model used in the insight generation.""" - - @overload - def __init__( - self, - *, - eval_id: str, - run_ids: list[str], - model_configuration: Optional["_models.InsightModelConfiguration"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore - - -class ScheduleTask(_Model): - """Schedule task model. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - EvaluationScheduleTask, InsightScheduleTask - - :ivar type: Type of the task. Required. Known values are: "Evaluation" and "Insight". - :vartype type: str or ~azure.ai.projects.models.ScheduleTaskType - :ivar configuration: Configuration for the task. - :vartype configuration: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Type of the task. Required. Known values are: \"Evaluation\" and \"Insight\".""" - configuration: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Configuration for the task.""" - - @overload - def __init__( - self, - *, - type: str, - configuration: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluationScheduleTask(ScheduleTask, discriminator="Evaluation"): - """Evaluation task for the schedule. - - :ivar configuration: Configuration for the task. - :vartype configuration: dict[str, str] - :ivar type: Required. Evaluation task. - :vartype type: str or ~azure.ai.projects.models.EVALUATION - :ivar eval_id: Identifier of the evaluation group. Required. - :vartype eval_id: str - :ivar eval_run: The evaluation run payload. Required. - :vartype eval_run: any - """ - - type: Literal[ScheduleTaskType.EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Evaluation task.""" - eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) - """Identifier of the evaluation group. Required.""" - eval_run: Any = rest_field(name="evalRun", visibility=["read", "create", "update", "delete", "query"]) - """The evaluation run payload. Required.""" - - @overload - def __init__( - self, - *, - eval_id: str, - eval_run: Any, - configuration: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ScheduleTaskType.EVALUATION # type: ignore - - -class EvaluationTaxonomy(_Model): - """Evaluation Taxonomy Definition. - - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. 
Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar taxonomy_input: Input configuration for the evaluation taxonomy. Required. - :vartype taxonomy_input: ~azure.ai.projects.models.EvaluationTaxonomyInput - :ivar taxonomy_categories: List of taxonomy categories. - :vartype taxonomy_categories: list[~azure.ai.projects.models.TaxonomyCategory] - :ivar properties: Additional properties for the evaluation taxonomy. - :vartype properties: dict[str, str] - """ - - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. Tags can be added, removed, and updated.""" - taxonomy_input: "_models.EvaluationTaxonomyInput" = rest_field( - name="taxonomyInput", visibility=["read", "create", "update", "delete", "query"] - ) - """Input configuration for the evaluation taxonomy. Required.""" - taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = rest_field( - name="taxonomyCategories", visibility=["read", "create", "update", "delete", "query"] - ) - """List of taxonomy categories.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional properties for the evaluation taxonomy.""" - - @overload - def __init__( - self, - *, - taxonomy_input: "_models.EvaluationTaxonomyInput", - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = None, - properties: Optional[dict[str, str]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluatorMetric(_Model): - """Evaluator Metric. - - :ivar type: Type of the metric. Known values are: "ordinal", "continuous", and "boolean". - :vartype type: str or ~azure.ai.projects.models.EvaluatorMetricType - :ivar desirable_direction: It indicates whether a higher value is better or a lower value is - better for this metric. Known values are: "increase", "decrease", and "neutral". - :vartype desirable_direction: str or ~azure.ai.projects.models.EvaluatorMetricDirection - :ivar min_value: Minimum value for the metric. - :vartype min_value: float - :ivar max_value: Maximum value for the metric. If not specified, it is assumed to be unbounded. - :vartype max_value: float - :ivar is_primary: Indicates if this metric is primary when there are multiple metrics. - :vartype is_primary: bool - """ - - type: Optional[Union[str, "_models.EvaluatorMetricType"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Type of the metric. Known values are: \"ordinal\", \"continuous\", and \"boolean\".""" - desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """It indicates whether a higher value is better or a lower value is better for this metric. Known - values are: \"increase\", \"decrease\", and \"neutral\".""" - min_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Minimum value for the metric.""" - max_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Maximum value for the metric. 
If not specified, it is assumed to be unbounded.""" - is_primary: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Indicates if this metric is primary when there are multiple metrics.""" - - @overload - def __init__( - self, - *, - type: Optional[Union[str, "_models.EvaluatorMetricType"]] = None, - desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = None, - min_value: Optional[float] = None, - max_value: Optional[float] = None, - is_primary: Optional[bool] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class EvaluatorVersion(_Model): - """Evaluator Definition. - - :ivar display_name: Display Name for evaluator. It helps to find the evaluator easily in AI - Foundry. It does not need to be unique. - :vartype display_name: str - :ivar metadata: Metadata about the evaluator. - :vartype metadata: dict[str, str] - :ivar evaluator_type: The type of the evaluator. Required. Known values are: "builtin" and - "custom". - :vartype evaluator_type: str or ~azure.ai.projects.models.EvaluatorType - :ivar categories: The categories of the evaluator. Required. - :vartype categories: list[str or ~azure.ai.projects.models.EvaluatorCategory] - :ivar definition: Definition of the evaluator. Required. - :vartype definition: ~azure.ai.projects.models.EvaluatorDefinition - :ivar created_by: Creator of the evaluator. Required. - :vartype created_by: str - :ivar created_at: Creation date/time of the evaluator. Required. - :vartype created_at: int - :ivar modified_at: Last modified date/time of the evaluator. Required. - :vartype modified_at: int - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. 
- :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - display_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Display Name for evaluator. It helps to find the evaluator easily in AI Foundry. It does not - need to be unique.""" - metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Metadata about the evaluator.""" - evaluator_type: Union[str, "_models.EvaluatorType"] = rest_field(visibility=["read", "create"]) - """The type of the evaluator. Required. Known values are: \"builtin\" and \"custom\".""" - categories: list[Union[str, "_models.EvaluatorCategory"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The categories of the evaluator. Required.""" - definition: "_models.EvaluatorDefinition" = rest_field(visibility=["read", "create"]) - """Definition of the evaluator. Required.""" - created_by: str = rest_field(visibility=["read"]) - """Creator of the evaluator. Required.""" - created_at: int = rest_field(visibility=["read"]) - """Creation date/time of the evaluator. Required.""" - modified_at: int = rest_field(visibility=["read"]) - """Last modified date/time of the evaluator. Required.""" - id: Optional[str] = rest_field(visibility=["read"]) - """Asset ID, a unique identifier for the asset.""" - name: str = rest_field(visibility=["read"]) - """The name of the resource. Required.""" - version: str = rest_field(visibility=["read"]) - """The version of the resource. Required.""" - description: Optional[str] = rest_field(visibility=["create", "update"]) - """The asset description text.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) - """Tag dictionary. 
Tags can be added, removed, and updated.""" - - @overload - def __init__( - self, - *, - evaluator_type: Union[str, "_models.EvaluatorType"], - categories: list[Union[str, "_models.EvaluatorCategory"]], - definition: "_models.EvaluatorDefinition", - display_name: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FabricDataAgentToolParameters(_Model): - """The fabric data agent tool parameters. - - :ivar project_connections: The project connections attached to this tool. There can be a - maximum of 1 connection - resource attached to the tool. - :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] - """ - - project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The project connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool.""" - - @overload - def __init__( - self, - *, - project_connections: Optional[list["_models.ToolProjectConnection"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FieldMapping(_Model): - """Field mapping configuration class. - - :ivar content_fields: List of fields with text content. Required. - :vartype content_fields: list[str] - :ivar filepath_field: Path of file to be used as a source of text content. 
- :vartype filepath_field: str - :ivar title_field: Field containing the title of the document. - :vartype title_field: str - :ivar url_field: Field containing the url of the document. - :vartype url_field: str - :ivar vector_fields: List of fields with vector content. - :vartype vector_fields: list[str] - :ivar metadata_fields: List of fields with metadata content. - :vartype metadata_fields: list[str] - """ - - content_fields: list[str] = rest_field(name="contentFields", visibility=["create"]) - """List of fields with text content. Required.""" - filepath_field: Optional[str] = rest_field(name="filepathField", visibility=["create"]) - """Path of file to be used as a source of text content.""" - title_field: Optional[str] = rest_field(name="titleField", visibility=["create"]) - """Field containing the title of the document.""" - url_field: Optional[str] = rest_field(name="urlField", visibility=["create"]) - """Field containing the url of the document.""" - vector_fields: Optional[list[str]] = rest_field(name="vectorFields", visibility=["create"]) - """List of fields with vector content.""" - metadata_fields: Optional[list[str]] = rest_field(name="metadataFields", visibility=["create"]) - """List of fields with metadata content.""" - - @overload - def __init__( - self, - *, - content_fields: list[str], - filepath_field: Optional[str] = None, - title_field: Optional[str] = None, - url_field: Optional[str] = None, - vector_fields: Optional[list[str]] = None, - metadata_fields: Optional[list[str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): - """FileDatasetVersion Definition. - - :ivar data_uri: URI of the data (`example `_). - Required. 
- :vartype data_uri: str - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Dataset type. Required. URI file. - :vartype type: str or ~azure.ai.projects.models.URI_FILE - """ - - type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Dataset type. Required. URI file.""" - - @overload - def __init__( - self, - *, - data_uri: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = DatasetType.URI_FILE # type: ignore - - -class FileSearchTool(Tool, discriminator="file_search"): - """A tool that searches for relevant content from uploaded files. Learn more about the `file - search tool `_. - - :ivar type: The type of the file search tool. Always ``file_search``. Required. 
- :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH - :ivar vector_store_ids: The IDs of the vector stores to search. Required. - :vartype vector_store_ids: list[str] - :ivar max_num_results: The maximum number of results to return. This number should be between 1 - and 50 inclusive. - :vartype max_num_results: int - :ivar ranking_options: Ranking options for search. - :vartype ranking_options: ~azure.ai.projects.models.RankingOptions - :ivar filters: A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type. - :vartype filters: ~azure.ai.projects.models.ComparisonFilter or - ~azure.ai.projects.models.CompoundFilter - """ - - type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the file search tool. Always ``file_search``. Required.""" - vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The IDs of the vector stores to search. Required.""" - max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The maximum number of results to return. This number should be between 1 and 50 inclusive.""" - ranking_options: Optional["_models.RankingOptions"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Ranking options for search.""" - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type.""" - - @overload - def __init__( - self, - *, - vector_store_ids: list[str], - max_num_results: Optional[int] = None, - ranking_options: Optional["_models.RankingOptions"] = None, - filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.FILE_SEARCH # type: ignore - - -class FileSearchToolCallItemParam(ItemParam, discriminator="file_search_call"): - """The results of a file search tool call. See the - `file search guide `_ for more - information. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar queries: The queries used to search for files. Required. - :vartype queries: list[str] - :ivar results: The results of the file search tool call. - :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] - """ - - type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The queries used to search for files. Required.""" - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results of the file search tool call.""" - - @overload - def __init__( - self, - *, - queries: list[str], - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FILE_SEARCH_CALL # type: ignore - - -class FileSearchToolCallItemParamResult(_Model): - """FileSearchToolCallItemParamResult. - - :ivar file_id: The unique ID of the file. 
- :vartype file_id: str - :ivar text: The text that was retrieved from the file. - :vartype text: str - :ivar filename: The name of the file. - :vartype filename: str - :ivar attributes: - :vartype attributes: ~azure.ai.projects.models.VectorStoreFileAttributes - :ivar score: The relevance score of the file - a value between 0 and 1. - :vartype score: float - """ - - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the file.""" - text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text that was retrieved from the file.""" - filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the file.""" - attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The relevance score of the file - a value between 0 and 1.""" - - @overload - def __init__( - self, - *, - file_id: Optional[str] = None, - text: Optional[str] = None, - filename: Optional[str] = None, - attributes: Optional["_models.VectorStoreFileAttributes"] = None, - score: Optional[float] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class FileSearchToolCallItemResource(ItemResource, discriminator="file_search_call"): - """The results of a file search tool call. See the - `file search guide `_ for more - information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL - :ivar status: The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], - Literal["failed"] - :vartype status: str or str or str or str or str - :ivar queries: The queries used to search for files. Required. - :vartype queries: list[str] - :ivar results: The results of the file search tool call. - :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] - """ - - type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the file search tool call. One of ``in_progress``, - ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The queries used to search for files. Required.""" - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results of the file search tool call.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - queries: list[str], - created_by: Optional["_models.CreatedBy"] = None, - results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FILE_SEARCH_CALL # type: ignore - - -class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): - """FileDatasetVersion Definition. - - :ivar data_uri: URI of the data (`example `_). - Required. - :vartype data_uri: str - :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset - manages storage itself. If true, the underlying data will not be deleted when the dataset - version is deleted. - :vartype is_reference: bool - :ivar connection_name: The Azure Storage Account connection name. Required if - startPendingUploadVersion was not called before creating the Dataset. - :vartype connection_name: str - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar type: Dataset type. Required. URI folder. - :vartype type: str or ~azure.ai.projects.models.URI_FOLDER - """ - - type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Dataset type. Required. URI folder.""" - - @overload - def __init__( - self, - *, - data_uri: str, - connection_name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = DatasetType.URI_FOLDER # type: ignore - - -class FunctionTool(Tool, discriminator="function"): - """Defines a function in your own code the model can choose to call. Learn more about `function - calling `_. - - :ivar type: The type of the function tool. Always ``function``. Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION - :ivar name: The name of the function to call. Required. - :vartype name: str - :ivar description: A description of the function. Used by the model to determine whether or not - to call the function. - :vartype description: str - :ivar parameters: A JSON schema object describing the parameters of the function. Required. - :vartype parameters: any - :ivar strict: Whether to enforce strict parameter validation. Default ``true``. Required. - :vartype strict: bool - """ - - type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the function tool. Always ``function``. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to call. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of the function. Used by the model to determine whether or not to call the - function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON schema object describing the parameters of the function. Required.""" - strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enforce strict parameter validation. Default ``true``. 
Required.""" - - @overload - def __init__( - self, - *, - name: str, - parameters: Any, - strict: bool, - description: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolType.FUNCTION # type: ignore - - -class FunctionToolCallItemParam(ItemParam, discriminator="function_call"): - """A tool call to run a function. See the - `function calling guide `_ for more - information. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar name: The name of the function to run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments to pass to the function. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments to pass to the function. Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - name: str, - arguments: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL # type: ignore - - -class FunctionToolCallItemResource(ItemResource, discriminator="function_call"): - """A tool call to run a function. See the - `function calling guide `_ for more - information. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar name: The name of the function to run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments to pass to the function. Required. - :vartype arguments: str - """ - - type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. 
Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments to pass to the function. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL # type: ignore - - -class FunctionToolCallOutputItemParam(ItemParam, discriminator="function_call_output"): - """The output of a function tool call. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar output: A JSON string of the output of the function tool call. Required. - :vartype output: str - """ - - type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the function tool call. Required.""" - - @overload - def __init__( - self, - *, - call_id: str, - output: str, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore - - -class FunctionToolCallOutputItemResource(ItemResource, discriminator="function_call_output"): - """The output of a function tool call. - - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the function tool call generated by the model. Required. - :vartype call_id: str - :ivar output: A JSON string of the output of the function tool call. Required. - :vartype output: str - """ - - type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the function tool call generated by the model. 
Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the function tool call. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - output: str, - created_by: Optional["_models.CreatedBy"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore - - -class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): - """The hosted agent definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ImageBasedHostedAgentDefinition - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.HOSTED - :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar cpu: The CPU configuration for the hosted agent. Required. - :vartype cpu: str - :ivar memory: The memory configuration for the hosted agent. Required. - :vartype memory: str - :ivar environment_variables: Environment variables to set in the hosted agent container. 
- :vartype environment_variables: dict[str, str] - """ - - __mapping__: dict[str, _Model] = {} - kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the hosted agent's model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter.""" - container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The protocols that the agent supports for ingress communication of the containers. Required.""" - cpu: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The CPU configuration for the hosted agent. Required.""" - memory: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The memory configuration for the hosted agent. Required.""" - environment_variables: Optional[dict[str, str]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Environment variables to set in the hosted agent container.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - cpu: str, - memory: str, - rai_config: Optional["_models.RaiConfig"] = None, - tools: Optional[list["_models.Tool"]] = None, - environment_variables: Optional[dict[str, str]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.kind = AgentKind.HOSTED # type: ignore - - -class HourlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Hourly"): - """Hourly recurrence schedule. - - :ivar type: Required. Hourly recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.HOURLY - """ - - type: Literal[RecurrenceType.HOURLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Hourly recurrence pattern.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = RecurrenceType.HOURLY # type: ignore - - -class HumanEvaluationRuleAction(EvaluationRuleAction, discriminator="humanEvaluation"): - """Evaluation rule action for human evaluation. - - :ivar type: Required. Human evaluation. - :vartype type: str or ~azure.ai.projects.models.HUMAN_EVALUATION - :ivar template_id: Human evaluation template Id. Required. - :vartype template_id: str - """ - - type: Literal[EvaluationRuleActionType.HUMAN_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Human evaluation.""" - template_id: str = rest_field(name="templateId", visibility=["read", "create", "update", "delete", "query"]) - """Human evaluation template Id. Required.""" - - @overload - def __init__( - self, - *, - template_id: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = EvaluationRuleActionType.HUMAN_EVALUATION # type: ignore - - -class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): - """The image-based deployment definition for a hosted agent. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar tools: An array of tools the hosted agent's model may call while generating a response. - You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar container_protocol_versions: The protocols that the agent supports for ingress - communication of the containers. Required. - :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] - :ivar cpu: The CPU configuration for the hosted agent. Required. - :vartype cpu: str - :ivar memory: The memory configuration for the hosted agent. Required. - :vartype memory: str - :ivar environment_variables: Environment variables to set in the hosted agent container. - :vartype environment_variables: dict[str, str] - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.HOSTED - :ivar image: The image for the hosted agent. Required. - :vartype image: str - """ - - image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The image for the hosted agent. Required.""" - - @overload - def __init__( - self, - *, - container_protocol_versions: list["_models.ProtocolVersionRecord"], - cpu: str, - memory: str, - image: str, - rai_config: Optional["_models.RaiConfig"] = None, - tools: Optional[list["_models.Tool"]] = None, - environment_variables: Optional[dict[str, str]] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ImageGenTool(Tool, discriminator="image_generation"): - """A tool that generates images using a model like ``gpt-image-1``. - - :ivar type: The type of the image generation tool. Always ``image_generation``. Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION - :ivar model: The image generation model to use. Default: ``gpt-image-1``. Default value is - "gpt-image-1". - :vartype model: str - :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], - Literal["medium"], Literal["high"], Literal["auto"] - :vartype quality: str or str or str or str - :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, - ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"] - :vartype size: str or str or str or str - :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or - ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"], - Literal["jpeg"] - :vartype output_format: str or str or str - :ivar output_compression: Compression level for the output image. Default: 100. - :vartype output_compression: int - :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a - Literal["auto"] type or a Literal["low"] type. - :vartype moderation: str or str - :ivar background: Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. 
Is one of the following types: - Literal["transparent"], Literal["opaque"], Literal["auto"] - :vartype background: str or str or str - :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional). - :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask - :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default - value) to 3. - :vartype partial_images: int - """ - - type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the image generation tool. Always ``image_generation``. Required.""" - model: Optional[Literal["gpt-image-1"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The image generation model to use. Default: ``gpt-image-1``. Default value is \"gpt-image-1\".""" - quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The quality of the generated image. One of ``low``, ``medium``, ``high``, - or ``auto``. Default: ``auto``. Is one of the following types: Literal[\"low\"], - Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" - size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The size of the generated image. One of ``1024x1024``, ``1024x1536``, - ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" - output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The output format of the generated image. One of ``png``, ``webp``, or - ``jpeg``. Default: ``png``. 
Is one of the following types: Literal[\"png\"], Literal[\"webp\"], - Literal[\"jpeg\"]""" - output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Compression level for the output image. Default: 100.""" - moderation: Optional[Literal["auto", "low"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + testing_criteria: str = rest_field( + name="testingCriteria", visibility=["read", "create", "update", "delete", "query"] ) - """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type - or a Literal[\"low\"] type.""" - background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + """Name of the testing criteria. Required.""" + metric: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric being evaluated. Required.""" + evaluator: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the evaluator for this testing criteria. Required.""" + baseline_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="baselineRunSummary", visibility=["read", "create", "update", "delete", "query"] ) - """Background type for the generated image. One of ``transparent``, - ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: - Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" - input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + """Summary statistics of the baseline run. Required.""" + compare_items: list["_models.EvalRunResultCompareItem"] = rest_field( + name="compareItems", visibility=["read", "create", "update", "delete", "query"] ) - """Optional mask for inpainting. 
Contains ``image_url`` - (string, optional) and ``file_id`` (string, optional).""" - partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" + """List of comparison results for each treatment run. Required.""" @overload def __init__( self, *, - model: Optional[Literal["gpt-image-1"]] = None, - quality: Optional[Literal["low", "medium", "high", "auto"]] = None, - size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None, - output_format: Optional[Literal["png", "webp", "jpeg"]] = None, - output_compression: Optional[int] = None, - moderation: Optional[Literal["auto", "low"]] = None, - background: Optional[Literal["transparent", "opaque", "auto"]] = None, - input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None, - partial_images: Optional[int] = None, + testing_criteria: str, + metric: str, + evaluator: str, + baseline_run_summary: "_models.EvalRunResultSummary", + compare_items: list["_models.EvalRunResultCompareItem"], ) -> None: ... @overload @@ -6251,28 +4432,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.IMAGE_GENERATION # type: ignore -class ImageGenToolCallItemParam(ItemParam, discriminator="image_generation_call"): - """An image generation request made by the model. +class EvalRunResultSummary(_Model): + """Summary statistics of a metric in an evaluation run. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL - :ivar result: The generated image encoded in base64. Required. - :vartype result: str + :ivar run_id: The evaluation run ID. Required. + :vartype run_id: str + :ivar sample_count: Number of samples in the evaluation run. Required. 
+ :vartype sample_count: int + :ivar average: Average value of the metric in the evaluation run. Required. + :vartype average: float + :ivar standard_deviation: Standard deviation of the metric in the evaluation run. Required. + :vartype standard_deviation: float """ - type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The generated image encoded in base64. Required.""" + run_id: str = rest_field(name="runId", visibility=["read", "create", "update", "delete", "query"]) + """The evaluation run ID. Required.""" + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Number of samples in the evaluation run. Required.""" + average: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Average value of the metric in the evaluation run. Required.""" + standard_deviation: float = rest_field( + name="standardDeviation", visibility=["read", "create", "update", "delete", "query"] + ) + """Standard deviation of the metric in the evaluation run. Required.""" @overload def __init__( self, *, - result: str, + run_id: str, + sample_count: int, + average: float, + standard_deviation: float, ) -> None: ... @overload @@ -6284,43 +4477,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore -class ImageGenToolCallItemResource(ItemResource, discriminator="image_generation_call"): - """An image generation request made by the model. +class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): + """Evaluation Comparison Request. - :ivar id: Required. 
- :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["generating"], Literal["failed"] - :vartype status: str or str or str or str - :ivar result: The generated image encoded in base64. Required. - :vartype result: str + :ivar type: The type of request. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar eval_id: Identifier for the evaluation. Required. + :vartype eval_id: str + :ivar baseline_run_id: The baseline run ID for comparison. Required. + :vartype baseline_run_id: str + :ivar treatment_run_ids: List of treatment run IDs for comparison. Required. + :vartype treatment_run_ids: list[str] """ - type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of request. Required. Evaluation Comparison.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the evaluation. Required.""" + baseline_run_id: str = rest_field(name="baselineRunId", visibility=["read", "create", "update", "delete", "query"]) + """The baseline run ID for comparison. Required.""" + treatment_run_ids: list[str] = rest_field( + name="treatmentRunIds", visibility=["read", "create", "update", "delete", "query"] ) - """Required. 
Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"generating\"], Literal[\"failed\"]""" - result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The generated image encoded in base64. Required.""" + """List of treatment run IDs for comparison. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "generating", "failed"], - result: str, - created_by: Optional["_models.CreatedBy"] = None, + eval_id: str, + baseline_run_id: str, + treatment_run_ids: list[str], ) -> None: ... @overload @@ -6332,29 +4521,45 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore + self.type = InsightType.EVALUATION_COMPARISON # type: ignore -class ImageGenToolInputImageMask(_Model): - """ImageGenToolInputImageMask. +class InsightSample(_Model): + """A sample from the analysis. - :ivar image_url: Base64-encoded mask image. - :vartype image_url: str - :ivar file_id: File ID for the mask image. - :vartype file_id: str + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EvaluationResultSample + + :ivar id: The unique identifier for the analysis sample. Required. + :vartype id: str + :ivar type: Sample type. Required. "EvaluationResultSample" + :vartype type: str or ~azure.ai.projects.models.SampleType + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. 
+ :vartype correlation_info: dict[str, any] """ - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded mask image.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """File ID for the mask image.""" + __mapping__: dict[str, _Model] = {} + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier for the analysis sample. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Sample type. Required. \"EvaluationResultSample\"""" + features: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Features to help with additional filtering of data in UX. Required.""" + correlation_info: dict[str, Any] = rest_field( + name="correlationInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """Info about the correlation for the analysis sample. Required.""" @overload def __init__( self, *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, + id: str, # pylint: disable=redefined-builtin + type: str, + features: dict[str, Any], + correlation_info: dict[str, Any], ) -> None: ... @overload @@ -6368,44 +4573,36 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class Insight(_Model): - """The response body for cluster insights. +class EvaluationResultSample(InsightSample, discriminator="EvaluationResultSample"): + """A sample from the evaluation result. - :ivar id: The unique identifier for the insights report. Required. + :ivar id: The unique identifier for the analysis sample. Required. :vartype id: str - :ivar metadata: Metadata about the insights report. Required. - :vartype metadata: ~azure.ai.projects.models.InsightsMetadata - :ivar state: The current state of the insights. Required. 
Known values are: "NotStarted", - "Running", "Succeeded", "Failed", and "Canceled". - :vartype state: str or ~azure.ai.projects.models.OperationState - :ivar display_name: User friendly display name for the insight. Required. - :vartype display_name: str - :ivar request: Request for the insights analysis. Required. - :vartype request: ~azure.ai.projects.models.InsightRequest - :ivar result: The result of the insights report. - :vartype result: ~azure.ai.projects.models.InsightResult + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. + :vartype correlation_info: dict[str, any] + :ivar type: Evaluation Result Sample Type. Required. A sample from the evaluation result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RESULT_SAMPLE + :ivar evaluation_result: Evaluation result for the analysis sample. Required. + :vartype evaluation_result: ~azure.ai.projects.models.EvalResult """ - id: str = rest_field(visibility=["read"]) - """The unique identifier for the insights report. Required.""" - metadata: "_models.InsightsMetadata" = rest_field(visibility=["read"]) - """Metadata about the insights report. Required.""" - state: Union[str, "_models.OperationState"] = rest_field(visibility=["read"]) - """The current state of the insights. Required. Known values are: \"NotStarted\", \"Running\", - \"Succeeded\", \"Failed\", and \"Canceled\".""" - display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"]) - """User friendly display name for the insight. Required.""" - request: "_models.InsightRequest" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Request for the insights analysis. 
Required.""" - result: Optional["_models.InsightResult"] = rest_field(visibility=["read"]) - """The result of the insights report.""" + type: Literal[SampleType.EVALUATION_RESULT_SAMPLE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Evaluation Result Sample Type. Required. A sample from the evaluation result.""" + evaluation_result: "_models.EvalResult" = rest_field( + name="evaluationResult", visibility=["read", "create", "update", "delete", "query"] + ) + """Evaluation result for the analysis sample. Required.""" @overload def __init__( self, *, - display_name: str, - request: "_models.InsightRequest", + id: str, # pylint: disable=redefined-builtin + features: dict[str, Any], + correlation_info: dict[str, Any], + evaluation_result: "_models.EvalResult", ) -> None: ... @overload @@ -6417,66 +4614,65 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = SampleType.EVALUATION_RESULT_SAMPLE # type: ignore -class InsightCluster(_Model): - """A cluster of analysis samples. +class EvaluationRule(_Model): + """Evaluation rule model. - :ivar id: The id of the analysis cluster. Required. + :ivar id: Unique identifier for the evaluation rule. Required. :vartype id: str - :ivar label: Label for the cluster. Required. - :vartype label: str - :ivar suggestion: Suggestion for the cluster. Required. - :vartype suggestion: str - :ivar suggestion_title: The title of the suggestion for the cluster. Required. - :vartype suggestion_title: str - :ivar description: Description of the analysis cluster. Required. + :ivar display_name: Display Name for the evaluation rule. + :vartype display_name: str + :ivar description: Description for the evaluation rule. :vartype description: str - :ivar weight: The weight of the analysis cluster. This indicate number of samples in the - cluster. Required. 
- :vartype weight: int - :ivar sub_clusters: List of subclusters within this cluster. Empty if no subclusters exist. - :vartype sub_clusters: list[~azure.ai.projects.models.InsightCluster] - :ivar samples: List of samples that belong to this cluster. Empty if samples are part of - subclusters. - :vartype samples: list[~azure.ai.projects.models.InsightSample] + :ivar action: Definition of the evaluation rule action. Required. + :vartype action: ~azure.ai.projects.models.EvaluationRuleAction + :ivar filter: Filter condition of the evaluation rule. + :vartype filter: ~azure.ai.projects.models.EvaluationRuleFilter + :ivar event_type: Event type that the evaluation rule applies to. Required. Known values are: + "responseCompleted" and "manual". + :vartype event_type: str or ~azure.ai.projects.models.EvaluationRuleEventType + :ivar enabled: Indicates whether the evaluation rule is enabled. Default is true. Required. + :vartype enabled: bool + :ivar system_data: System metadata for the evaluation rule. Required. + :vartype system_data: dict[str, str] """ - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The id of the analysis cluster. Required.""" - label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Label for the cluster. Required.""" - suggestion: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Suggestion for the cluster. Required.""" - suggestion_title: str = rest_field( - name="suggestionTitle", visibility=["read", "create", "update", "delete", "query"] - ) - """The title of the suggestion for the cluster. Required.""" - description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Description of the analysis cluster. Required.""" - weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The weight of the analysis cluster. This indicate number of samples in the cluster. 
Required.""" - sub_clusters: Optional[list["_models.InsightCluster"]] = rest_field( - name="subClusters", visibility=["read", "create", "update", "delete", "query"] + id: str = rest_field(visibility=["read"]) + """Unique identifier for the evaluation rule. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] ) - """List of subclusters within this cluster. Empty if no subclusters exist.""" - samples: Optional[list["_models.InsightSample"]] = rest_field( + """Display Name for the evaluation rule.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description for the evaluation rule.""" + action: "_models.EvaluationRuleAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Definition of the evaluation rule action. Required.""" + filter: Optional["_models.EvaluationRuleFilter"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """List of samples that belong to this cluster. Empty if samples are part of subclusters.""" + """Filter condition of the evaluation rule.""" + event_type: Union[str, "_models.EvaluationRuleEventType"] = rest_field( + name="eventType", visibility=["read", "create", "update", "delete", "query"] + ) + """Event type that the evaluation rule applies to. Required. Known values are: + \"responseCompleted\" and \"manual\".""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether the evaluation rule is enabled. Default is true. Required.""" + system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) + """System metadata for the evaluation rule. 
Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - label: str, - suggestion: str, - suggestion_title: str, - description: str, - weight: int, - sub_clusters: Optional[list["_models.InsightCluster"]] = None, - samples: Optional[list["_models.InsightSample"]] = None, + action: "_models.EvaluationRuleAction", + event_type: Union[str, "_models.EvaluationRuleEventType"], + enabled: bool, + display_name: Optional[str] = None, + description: Optional[str] = None, + filter: Optional["_models.EvaluationRuleFilter"] = None, # pylint: disable=redefined-builtin ) -> None: ... @overload @@ -6490,26 +4686,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InsightModelConfiguration(_Model): - """Configuration of the model used in the insight generation. +class EvaluationRuleFilter(_Model): + """Evaluation filter model. - :ivar model_deployment_name: The model deployment to be evaluated. Accepts either the - deployment name alone or with the connection name as '{connectionName}/'. - Required. - :vartype model_deployment_name: str + :ivar agent_name: Filter by agent name. Required. + :vartype agent_name: str """ - model_deployment_name: str = rest_field( - name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] - ) - """The model deployment to be evaluated. Accepts either the deployment name alone or with the - connection name as '{connectionName}/'. Required.""" + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Filter by agent name. Required.""" @overload def __init__( self, *, - model_deployment_name: str, + agent_name: str, ) -> None: ... @overload @@ -6523,28 +4714,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InsightScheduleTask(ScheduleTask, discriminator="Insight"): - """Insight task for the schedule. 
+class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): + """Insights from the evaluation run cluster analysis. - :ivar configuration: Configuration for the task. - :vartype configuration: dict[str, str] - :ivar type: Required. Insight task. - :vartype type: str or ~azure.ai.projects.models.INSIGHT - :ivar insight: The insight payload. Required. - :vartype insight: ~azure.ai.projects.models.Insight + :ivar type: The type of insights result. Required. Insights on an Evaluation run result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT + :ivar cluster_insight: Required. + :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult """ - type: Literal[ScheduleTaskType.INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Insight task.""" - insight: "_models.Insight" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The insight payload. Required.""" + type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Insights on an Evaluation run result.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" @overload def __init__( self, *, - insight: "_models.Insight", - configuration: Optional[dict[str, str]] = None, + cluster_insight: "_models.ClusterInsightResult", ) -> None: ... 
@overload @@ -6556,33 +4746,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ScheduleTaskType.INSIGHT # type: ignore + self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore -class InsightsMetadata(_Model): - """Metadata about the insights. +class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): + """Insights on set of Evaluation Results. - :ivar created_at: The timestamp when the insights were created. Required. - :vartype created_at: ~datetime.datetime - :ivar completed_at: The timestamp when the insights were completed. - :vartype completed_at: ~datetime.datetime + :ivar type: The type of insights request. Required. Insights on an Evaluation run result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT + :ivar eval_id: Evaluation Id for the insights. Required. + :vartype eval_id: str + :ivar run_ids: List of evaluation run IDs for the insights. Required. + :vartype run_ids: list[str] + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration """ - created_at: datetime.datetime = rest_field( - name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" - ) - """The timestamp when the insights were created. Required.""" - completed_at: Optional[datetime.datetime] = rest_field( - name="completedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights request. Required. 
Insights on an Evaluation run result.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Evaluation Id for the insights. Required.""" + run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) + """List of evaluation run IDs for the insights. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] ) - """The timestamp when the insights were completed.""" + """Configuration of the model used in the insight generation.""" @overload def __init__( self, *, - created_at: datetime.datetime, - completed_at: Optional[datetime.datetime] = None, + eval_id: str, + run_ids: list[str], + model_configuration: Optional["_models.InsightModelConfiguration"] = None, ) -> None: ... @overload @@ -6594,47 +4791,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore -class InsightSummary(_Model): - """Summary of the error cluster analysis. +class ScheduleTask(_Model): + """Schedule task model. - :ivar sample_count: Total number of samples analyzed. Required. - :vartype sample_count: int - :ivar unique_subcluster_count: Total number of unique subcluster labels. Required. - :vartype unique_subcluster_count: int - :ivar unique_cluster_count: Total number of unique clusters. Required. - :vartype unique_cluster_count: int - :ivar method: Method used for clustering. Required. - :vartype method: str - :ivar usage: Token usage while performing clustering analysis. Required. - :vartype usage: ~azure.ai.projects.models.ClusterTokenUsage + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + EvaluationScheduleTask, InsightScheduleTask + + :ivar type: Type of the task. Required. Known values are: "Evaluation" and "Insight". + :vartype type: str or ~azure.ai.projects.models.ScheduleTaskType + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] """ - sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) - """Total number of samples analyzed. Required.""" - unique_subcluster_count: int = rest_field( - name="uniqueSubclusterCount", visibility=["read", "create", "update", "delete", "query"] - ) - """Total number of unique subcluster labels. Required.""" - unique_cluster_count: int = rest_field( - name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] - ) - """Total number of unique clusters. Required.""" - method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Method used for clustering. Required.""" - usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Token usage while performing clustering analysis. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the task. Required. Known values are: \"Evaluation\" and \"Insight\".""" + configuration: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration for the task.""" @overload def __init__( self, *, - sample_count: int, - unique_subcluster_count: int, - unique_cluster_count: int, - method: str, - usage: "_models.ClusterTokenUsage", + type: str, + configuration: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -6648,28 +4831,33 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ItemContent(_Model): - """ItemContent. 
- - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ItemContentInputAudio, ItemContentInputFile, ItemContentInputImage, ItemContentInputText, - ItemContentOutputAudio, ItemContentOutputText, ItemContentRefusal +class EvaluationScheduleTask(ScheduleTask, discriminator="Evaluation"): + """Evaluation task for the schedule. - :ivar type: Required. Known values are: "input_text", "input_audio", "input_image", - "input_file", "output_text", "output_audio", and "refusal". - :vartype type: str or ~azure.ai.projects.models.ItemContentType + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + :ivar type: Required. Evaluation task. + :vartype type: str or ~azure.ai.projects.models.EVALUATION + :ivar eval_id: Identifier of the evaluation group. Required. + :vartype eval_id: str + :ivar eval_run: The evaluation run payload. Required. + :vartype eval_run: any """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"input_text\", \"input_audio\", \"input_image\", \"input_file\", - \"output_text\", \"output_audio\", and \"refusal\".""" + type: Literal[ScheduleTaskType.EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Evaluation task.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier of the evaluation group. Required.""" + eval_run: Any = rest_field(name="evalRun", visibility=["read", "create", "update", "delete", "query"]) + """The evaluation run payload. Required.""" @overload def __init__( self, *, - type: str, + eval_id: str, + eval_run: Any, + configuration: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -6681,34 +4869,60 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ScheduleTaskType.EVALUATION # type: ignore -class ItemContentInputAudio(ItemContent, discriminator="input_audio"): - """An audio input to the model. +class EvaluationTaxonomy(_Model): + """Evaluation Taxonomy Definition. - :ivar type: The type of the input item. Always ``input_audio``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_AUDIO - :ivar data: Base64-encoded audio data. Required. - :vartype data: str - :ivar format: The format of the audio data. Currently supported formats are ``mp3`` and - ``wav``. Required. Is either a Literal["mp3"] type or a Literal["wav"] type. - :vartype format: str or str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar taxonomy_input: Input configuration for the evaluation taxonomy. Required. + :vartype taxonomy_input: ~azure.ai.projects.models.EvaluationTaxonomyInput + :ivar taxonomy_categories: List of taxonomy categories. + :vartype taxonomy_categories: list[~azure.ai.projects.models.TaxonomyCategory] + :ivar properties: Additional properties for the evaluation taxonomy. + :vartype properties: dict[str, str] """ - type: Literal[ItemContentType.INPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_audio``. Required.""" - data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded audio data. 
Required.""" - format: Literal["mp3", "wav"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The format of the audio data. Currently supported formats are ``mp3`` and - ``wav``. Required. Is either a Literal[\"mp3\"] type or a Literal[\"wav\"] type.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + taxonomy_input: "_models.EvaluationTaxonomyInput" = rest_field( + name="taxonomyInput", visibility=["read", "create", "update", "delete", "query"] + ) + """Input configuration for the evaluation taxonomy. Required.""" + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = rest_field( + name="taxonomyCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of taxonomy categories.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the evaluation taxonomy.""" @overload def __init__( self, *, - data: str, - format: Literal["mp3", "wav"], + taxonomy_input: "_models.EvaluationTaxonomyInput", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = None, + properties: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -6720,38 +4934,49 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_AUDIO # type: ignore -class ItemContentInputFile(ItemContent, discriminator="input_file"): - """A file input to the model. +class EvaluatorMetric(_Model): + """Evaluator Metric. - :ivar type: The type of the input item. Always ``input_file``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_FILE - :ivar file_id: The ID of the file to be sent to the model. - :vartype file_id: str - :ivar filename: The name of the file to be sent to the model. - :vartype filename: str - :ivar file_data: The content of the file to be sent to the model. - :vartype file_data: str + :ivar type: Type of the metric. Known values are: "ordinal", "continuous", and "boolean". + :vartype type: str or ~azure.ai.projects.models.EvaluatorMetricType + :ivar desirable_direction: It indicates whether a higher value is better or a lower value is + better for this metric. Known values are: "increase", "decrease", and "neutral". + :vartype desirable_direction: str or ~azure.ai.projects.models.EvaluatorMetricDirection + :ivar min_value: Minimum value for the metric. + :vartype min_value: float + :ivar max_value: Maximum value for the metric. If not specified, it is assumed to be unbounded. + :vartype max_value: float + :ivar is_primary: Indicates if this metric is primary when there are multiple metrics. + :vartype is_primary: bool """ - type: Literal[ItemContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_file``. 
Required.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file to be sent to the model.""" - filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the file to be sent to the model.""" - file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content of the file to be sent to the model.""" + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the metric. Known values are: \"ordinal\", \"continuous\", and \"boolean\".""" + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """It indicates whether a higher value is better or a lower value is better for this metric. Known + values are: \"increase\", \"decrease\", and \"neutral\".""" + min_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Minimum value for the metric.""" + max_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum value for the metric. If not specified, it is assumed to be unbounded.""" + is_primary: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates if this metric is primary when there are multiple metrics.""" @overload def __init__( self, *, - file_id: Optional[str] = None, - filename: Optional[str] = None, - file_data: Optional[str] = None, + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = None, + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = None, + min_value: Optional[float] = None, + max_value: Optional[float] = None, + is_primary: Optional[bool] = None, ) -> None: ... 
@overload @@ -6763,47 +4988,82 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_FILE # type: ignore -class ItemContentInputImage(ItemContent, discriminator="input_image"): - """An image input to the model. Learn about `image inputs - `_. +class EvaluatorVersion(_Model): + """Evaluator Definition. - :ivar type: The type of the input item. Always ``input_image``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE - :ivar image_url: The URL of the image to be sent to the model. A fully qualified URL or base64 - encoded image in a data URL. - :vartype image_url: str - :ivar file_id: The ID of the file to be sent to the model. - :vartype file_id: str - :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, - or ``auto``. Defaults to ``auto``. Is one of the following types: Literal["low"], - Literal["high"], Literal["auto"] - :vartype detail: str or str or str + :ivar display_name: Display Name for evaluator. It helps to find the evaluator easily in AI + Foundry. It does not need to be unique. + :vartype display_name: str + :ivar metadata: Metadata about the evaluator. + :vartype metadata: dict[str, str] + :ivar evaluator_type: The type of the evaluator. Required. Known values are: "builtin" and + "custom". + :vartype evaluator_type: str or ~azure.ai.projects.models.EvaluatorType + :ivar categories: The categories of the evaluator. Required. + :vartype categories: list[str or ~azure.ai.projects.models.EvaluatorCategory] + :ivar definition: Definition of the evaluator. Required. + :vartype definition: ~azure.ai.projects.models.EvaluatorDefinition + :ivar created_by: Creator of the evaluator. Required. + :vartype created_by: str + :ivar created_at: Creation date/time of the evaluator. Required. 
+ :vartype created_at: int + :ivar modified_at: Last modified date/time of the evaluator. Required. + :vartype modified_at: int + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] """ - type: Literal[ItemContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_image``. Required.""" - image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL of the image to be sent to the model. A fully qualified URL or base64 encoded image in - a data URL.""" - file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the file to be sent to the model.""" - detail: Optional[Literal["low", "high", "auto"]] = rest_field( + display_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Display Name for evaluator. It helps to find the evaluator easily in AI Foundry. It does not + need to be unique.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metadata about the evaluator.""" + evaluator_type: Union[str, "_models.EvaluatorType"] = rest_field(visibility=["read", "create"]) + """The type of the evaluator. Required. Known values are: \"builtin\" and \"custom\".""" + categories: list[Union[str, "_models.EvaluatorCategory"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. 
- Defaults to ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"high\"], - Literal[\"auto\"]""" + """The categories of the evaluator. Required.""" + definition: "_models.EvaluatorDefinition" = rest_field(visibility=["read", "create"]) + """Definition of the evaluator. Required.""" + created_by: str = rest_field(visibility=["read"]) + """Creator of the evaluator. Required.""" + created_at: int = rest_field(visibility=["read"]) + """Creation date/time of the evaluator. Required.""" + modified_at: int = rest_field(visibility=["read"]) + """Last modified date/time of the evaluator. Required.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" @overload def __init__( self, *, - image_url: Optional[str] = None, - file_id: Optional[str] = None, - detail: Optional[Literal["low", "high", "auto"]] = None, + evaluator_type: Union[str, "_models.EvaluatorType"], + categories: list[Union[str, "_models.EvaluatorCategory"]], + definition: "_models.EvaluatorDefinition", + display_name: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -6815,28 +5075,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_IMAGE # type: ignore -class ItemContentInputText(ItemContent, discriminator="input_text"): - """A text input to the model. 
+class FabricDataAgentToolParameters(_Model): + """The fabric data agent tool parameters. - :ivar type: The type of the input item. Always ``input_text``. Required. - :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT - :ivar text: The text input to the model. Required. - :vartype text: str + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] """ - type: Literal[ItemContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the input item. Always ``input_text``. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text input to the model. Required.""" + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" @overload def __init__( self, *, - text: str, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, ) -> None: ... @overload @@ -6848,33 +5108,48 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.INPUT_TEXT # type: ignore -class ItemContentOutputAudio(ItemContent, discriminator="output_audio"): - """An audio output from the model. +class FieldMapping(_Model): + """Field mapping configuration class. - :ivar type: The type of the output audio. Always ``output_audio``. Required. - :vartype type: str or ~azure.ai.projects.models.OUTPUT_AUDIO - :ivar data: Base64-encoded audio data from the model. Required. 
- :vartype data: str - :ivar transcript: The transcript of the audio data from the model. Required. - :vartype transcript: str + :ivar content_fields: List of fields with text content. Required. + :vartype content_fields: list[str] + :ivar filepath_field: Path of file to be used as a source of text content. + :vartype filepath_field: str + :ivar title_field: Field containing the title of the document. + :vartype title_field: str + :ivar url_field: Field containing the url of the document. + :vartype url_field: str + :ivar vector_fields: List of fields with vector content. + :vartype vector_fields: list[str] + :ivar metadata_fields: List of fields with metadata content. + :vartype metadata_fields: list[str] """ - type: Literal[ItemContentType.OUTPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output audio. Always ``output_audio``. Required.""" - data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded audio data from the model. Required.""" - transcript: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The transcript of the audio data from the model. Required.""" + content_fields: list[str] = rest_field(name="contentFields", visibility=["create"]) + """List of fields with text content. 
Required.""" + filepath_field: Optional[str] = rest_field(name="filepathField", visibility=["create"]) + """Path of file to be used as a source of text content.""" + title_field: Optional[str] = rest_field(name="titleField", visibility=["create"]) + """Field containing the title of the document.""" + url_field: Optional[str] = rest_field(name="urlField", visibility=["create"]) + """Field containing the url of the document.""" + vector_fields: Optional[list[str]] = rest_field(name="vectorFields", visibility=["create"]) + """List of fields with vector content.""" + metadata_fields: Optional[list[str]] = rest_field(name="metadataFields", visibility=["create"]) + """List of fields with metadata content.""" @overload def __init__( self, *, - data: str, - transcript: str, + content_fields: list[str], + filepath_field: Optional[str] = None, + title_field: Optional[str] = None, + url_field: Optional[str] = None, + vector_fields: Optional[list[str]] = None, + metadata_fields: Optional[list[str]] = None, ) -> None: ... @overload @@ -6886,37 +5161,37 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.OUTPUT_AUDIO # type: ignore -class ItemContentOutputText(ItemContent, discriminator="output_text"): - """A text output from the model. +class FileCitationBody(Annotation, discriminator="file_citation"): + """File citation. - :ivar type: The type of the output text. Always ``output_text``. Required. - :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT - :ivar text: The text output from the model. Required. - :vartype text: str - :ivar annotations: The annotations of the text output. Required. - :vartype annotations: list[~azure.ai.projects.models.Annotation] - :ivar logprobs: - :vartype logprobs: list[~azure.ai.projects.models.LogProb] + :ivar type: The type of the file citation. Always ``file_citation``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.FILE_CITATION + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. + :vartype index: int + :ivar filename: The filename of the file cited. Required. + :vartype filename: str """ - type: Literal[ItemContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the output text. Always ``output_text``. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text output from the model. Required.""" - annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The annotations of the text output. Required.""" - logprobs: Optional[list["_models.LogProb"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file citation. Always ``file_citation``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" + filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The filename of the file cited. Required.""" @overload def __init__( self, *, - text: str, - annotations: list["_models.Annotation"], - logprobs: Optional[list["_models.LogProb"]] = None, + file_id: str, + index: int, + filename: str, ) -> None: ... 
@overload @@ -6928,28 +5203,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.OUTPUT_TEXT # type: ignore + self.type = AnnotationType.FILE_CITATION # type: ignore -class ItemContentRefusal(ItemContent, discriminator="refusal"): - """A refusal from the model. +class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): + """FileDatasetVersion Definition. - :ivar type: The type of the refusal. Always ``refusal``. Required. - :vartype type: str or ~azure.ai.projects.models.REFUSAL - :ivar refusal: The refusal explanationfrom the model. Required. - :vartype refusal: str + :ivar data_uri: URI of the data (`example `_). + Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI file. + :vartype type: str or ~azure.ai.projects.models.URI_FILE """ - type: Literal[ItemContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the refusal. Always ``refusal``. 
Required.""" - refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal explanationfrom the model. Required.""" + type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI file.""" @overload def __init__( self, *, - refusal: str, + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -6961,29 +5255,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemContentType.REFUSAL # type: ignore + self.type = DatasetType.URI_FILE # type: ignore -class ItemReferenceItemParam(ItemParam, discriminator="item_reference"): - """An internal identifier for an item to reference. +class FilePath(Annotation, discriminator="file_path"): + """File path. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE - :ivar id: The service-originated ID of the previously generated response item being referenced. - Required. - :vartype id: str + :ivar type: The type of the file path. Always ``file_path``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_PATH + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. + :vartype index: int """ - type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The service-originated ID of the previously generated response item being referenced. 
Required.""" + type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file path. Always ``file_path``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin + file_id: str, + index: int, ) -> None: ... @overload @@ -6995,49 +5293,49 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.ITEM_REFERENCE # type: ignore + self.type = AnnotationType.FILE_PATH # type: ignore -class LocalShellExecAction(_Model): - """Execute a shell command on the server. +class FileSearchTool(Tool, discriminator="file_search"): + """File search. - :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is - "exec". - :vartype type: str - :ivar command: The command to run. Required. - :vartype command: list[str] - :ivar timeout_ms: Optional timeout in milliseconds for the command. - :vartype timeout_ms: int - :ivar working_directory: Optional working directory to run the command in. - :vartype working_directory: str - :ivar env: Environment variables to set for the command. Required. - :vartype env: dict[str, str] - :ivar user: Optional user to run the command as. - :vartype user: str + :ivar type: The type of the file search tool. Always ``file_search``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + :ivar vector_store_ids: The IDs of the vector stores to search. Required. + :vartype vector_store_ids: list[str] + :ivar max_num_results: The maximum number of results to return. 
This number should be between 1 + and 50 inclusive. + :vartype max_num_results: int + :ivar ranking_options: Ranking options for search. + :vartype ranking_options: ~azure.ai.projects.models.RankingOptions + :ivar filters: Is either a ComparisonFilter type or a CompoundFilter type. + :vartype filters: ~azure.ai.projects.models.ComparisonFilter or + ~azure.ai.projects.models.CompoundFilter """ - type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\".""" - command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The command to run. Required.""" - timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional timeout in milliseconds for the command.""" - working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional working directory to run the command in.""" - env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Environment variables to set for the command. Required.""" - user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional user to run the command as.""" + type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file search tool. Always ``file_search``. Required.""" + vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The IDs of the vector stores to search. Required.""" + max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of results to return. 
This number should be between 1 and 50 inclusive.""" + ranking_options: Optional["_models.RankingOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Ranking options for search.""" + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is either a ComparisonFilter type or a CompoundFilter type.""" @overload def __init__( self, *, - command: list[str], - env: dict[str, str], - timeout_ms: Optional[int] = None, - working_directory: Optional[str] = None, - user: Optional[str] = None, + vector_store_ids: list[str], + max_num_results: Optional[int] = None, + ranking_options: Optional["_models.RankingOptions"] = None, + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, ) -> None: ... @overload @@ -7049,22 +5347,41 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type: Literal["exec"] = "exec" + self.type = ToolType.FILE_SEARCH # type: ignore -class LocalShellTool(Tool, discriminator="local_shell"): - """A tool that allows the model to execute shell commands in a local environment. +class FileSearchToolCallResults(_Model): + """FileSearchToolCallResults. - :ivar type: The type of the local shell tool. Always ``local_shell``. Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL + :ivar file_id: + :vartype file_id: str + :ivar text: + :vartype text: str + :ivar filename: + :vartype filename: str + :ivar attributes: + :vartype attributes: ~azure.ai.projects.models.VectorStoreFileAttributes + :ivar score: + :vartype score: float """ - type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the local shell tool. Always ``local_shell``. 
Required.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, + *, + file_id: Optional[str] = None, + text: Optional[str] = None, + filename: Optional[str] = None, + attributes: Optional["_models.VectorStoreFileAttributes"] = None, + score: Optional[float] = None, ) -> None: ... @overload @@ -7076,33 +5393,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.LOCAL_SHELL # type: ignore -class LocalShellToolCallItemParam(ItemParam, discriminator="local_shell_call"): - """A tool call to run a command on the local shell. +class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): + """FileDatasetVersion Definition. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL - :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.LocalShellExecAction + :ivar data_uri: URI of the data (`example `_). + Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. 
Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI folder. + :vartype type: str or ~azure.ai.projects.models.URI_FOLDER """ - type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. Required.""" - action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI folder.""" @overload def __init__( self, *, - call_id: str, - action: "_models.LocalShellExecAction", + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -7114,48 +5444,30 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL # type: ignore + self.type = DatasetType.URI_FOLDER # type: ignore -class LocalShellToolCallItemResource(ItemResource, discriminator="local_shell_call"): - """A tool call to run a command on the local shell. 
+class FunctionAndCustomToolCallOutput(_Model): + """FunctionAndCustomToolCallOutput. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. - :vartype call_id: str - :ivar action: Required. - :vartype action: ~azure.ai.projects.models.LocalShellExecAction + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FunctionAndCustomToolCallOutputInputFileContent, + FunctionAndCustomToolCallOutputInputImageContent, + FunctionAndCustomToolCallOutputInputTextContent + + :ivar type: Required. Known values are: "input_text", "input_image", and "input_file". + :vartype type: str or ~azure.ai.projects.models.FunctionAndCustomToolCallOutputType """ - type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"]""" - call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the local shell tool call generated by the model. 
Required.""" - action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"input_text\", \"input_image\", and \"input_file\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - call_id: str, - action: "_models.LocalShellExecAction", - created_by: Optional["_models.CreatedBy"] = None, + type: str, ) -> None: ... @overload @@ -7167,28 +5479,43 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL # type: ignore -class LocalShellToolCallOutputItemParam(ItemParam, discriminator="local_shell_call_output"): - """The output of a local shell tool call. +class FunctionAndCustomToolCallOutputInputFileContent( + FunctionAndCustomToolCallOutput, discriminator="input_file" +): # pylint: disable=name-too-long + """Input file. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT - :ivar output: A JSON string of the output of the local shell tool call. Required. - :vartype output: str + :ivar type: The type of the input item. Always ``input_file``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_FILE + :ivar file_id: + :vartype file_id: str + :ivar filename: The name of the file to be sent to the model. + :vartype filename: str + :ivar file_url: The URL of the file to be sent to the model. + :vartype file_url: str + :ivar file_data: The content of the file to be sent to the model. 
+ :vartype file_data: str """ - type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the local shell tool call. Required.""" + type: Literal[FunctionAndCustomToolCallOutputType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_file``. Required.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file to be sent to the model.""" + file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the file to be sent to the model.""" + file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the file to be sent to the model.""" @overload def __init__( self, *, - output: str, + file_id: Optional[str] = None, + filename: Optional[str] = None, + file_url: Optional[str] = None, + file_data: Optional[str] = None, ) -> None: ... @overload @@ -7200,43 +5527,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + self.type = FunctionAndCustomToolCallOutputType.INPUT_FILE # type: ignore -class LocalShellToolCallOutputItemResource(ItemResource, discriminator="local_shell_call_output"): - """The output of a local shell tool call. 
+class FunctionAndCustomToolCallOutputInputImageContent( + FunctionAndCustomToolCallOutput, discriminator="input_image" +): # pylint: disable=name-too-long + """Input image. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT - :ivar status: Required. Is one of the following types: Literal["in_progress"], - Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar output: A JSON string of the output of the local shell tool call. Required. - :vartype output: str + :ivar type: The type of the input item. Always ``input_image``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str + :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, + or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto". + :vartype detail: str or ~azure.ai.projects.models.ImageDetail """ - type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], - Literal[\"incomplete\"]""" - output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the output of the local shell tool call. 
Required.""" + type: Literal[FunctionAndCustomToolCallOutputType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_image``. Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. + Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - output: str, - created_by: Optional["_models.CreatedBy"] = None, + detail: Union[str, "_models.ImageDetail"], + image_url: Optional[str] = None, + file_id: Optional[str] = None, ) -> None: ... @overload @@ -7248,39 +5572,30 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + self.type = FunctionAndCustomToolCallOutputType.INPUT_IMAGE # type: ignore -class LogProb(_Model): - """The log probability of a token. +class FunctionAndCustomToolCallOutputInputTextContent( + FunctionAndCustomToolCallOutput, discriminator="input_text" +): # pylint: disable=name-too-long + """Input text. - :ivar token: Required. - :vartype token: str - :ivar logprob: Required. - :vartype logprob: float - :ivar bytes: Required. - :vartype bytes: list[int] - :ivar top_logprobs: Required. - :vartype top_logprobs: list[~azure.ai.projects.models.TopLogProb] + :ivar type: The type of the input item. Always ``input_text``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT + :ivar text: The text input to the model. Required. + :vartype text: str """ - token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + type: Literal[FunctionAndCustomToolCallOutputType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text input to the model. Required.""" @overload def __init__( self, - *, - token: str, - logprob: float, - bytes: list[int], - top_logprobs: list["_models.TopLogProb"], + *, + text: str, ) -> None: ... @overload @@ -7292,39 +5607,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = FunctionAndCustomToolCallOutputType.INPUT_TEXT # type: ignore -class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): - """Managed Azure AI Search Index Definition. +class FunctionShellAction(_Model): + """Shell exec action. - :ivar id: Asset ID, a unique identifier for the asset. - :vartype id: str - :ivar name: The name of the resource. Required. - :vartype name: str - :ivar version: The version of the resource. Required. - :vartype version: str - :ivar description: The asset description text. - :vartype description: str - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
- :vartype tags: dict[str, str] - :ivar type: Type of index. Required. Managed Azure Search - :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH - :ivar vector_store_id: Vector store id of managed index. Required. - :vartype vector_store_id: str + :ivar commands: Required. + :vartype commands: list[str] + :ivar timeout_ms: Required. + :vartype timeout_ms: int + :ivar max_output_length: Required. + :vartype max_output_length: int """ - type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of index. Required. Managed Azure Search""" - vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) - """Vector store id of managed index. Required.""" + commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + timeout_ms: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - vector_store_id: str, - description: Optional[str] = None, - tags: Optional[dict[str, str]] = None, + commands: list[str], + timeout_ms: int, + max_output_length: int, ) -> None: ... @overload @@ -7336,38 +5646,31 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = IndexType.MANAGED_AZURE_SEARCH # type: ignore -class MCPApprovalRequestItemParam(ItemParam, discriminator="mcp_approval_request"): - """A request for human approval of a tool invocation. +class FunctionShellActionParam(_Model): + """Shell action. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST - :ivar server_label: The label of the MCP server making the request. Required. 
- :vartype server_label: str - :ivar name: The name of the tool to run. Required. - :vartype name: str - :ivar arguments: A JSON string of arguments for the tool. Required. - :vartype arguments: str + :ivar commands: Ordered shell commands for the execution environment to run. Required. + :vartype commands: list[str] + :ivar timeout_ms: + :vartype timeout_ms: int + :ivar max_output_length: + :vartype max_output_length: int """ - type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server making the request. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of arguments for the tool. Required.""" + commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Ordered shell commands for the execution environment to run. Required.""" + timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - server_label: str, - name: str, - arguments: str, + commands: list[str], + timeout_ms: Optional[int] = None, + max_output_length: Optional[int] = None, ) -> None: ... 
@overload @@ -7379,44 +5682,44 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore -class MCPApprovalRequestItemResource(ItemResource, discriminator="mcp_approval_request"): - """A request for human approval of a tool invocation. +class FunctionShellCallOutputContent(_Model): + """Shell call output content. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST - :ivar server_label: The label of the MCP server making the request. Required. - :vartype server_label: str - :ivar name: The name of the tool to run. Required. - :vartype name: str - :ivar arguments: A JSON string of arguments for the tool. Required. - :vartype arguments: str + :ivar stdout: The standard output that was captured. Required. + :vartype stdout: str + :ivar stderr: The standard error output that was captured. Required. + :vartype stderr: str + :ivar outcome: Represents either an exit outcome (with an exit code) or a timeout outcome for a + shell call output chunk. Required. + :vartype outcome: ~azure.ai.projects.models.FunctionShellCallOutputOutcome + :ivar created_by: The identifier of the actor that created the item. + :vartype created_by: str """ - type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server making the request. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to run. 
Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of arguments for the tool. Required.""" + stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The standard output that was captured. Required.""" + stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The standard error output that was captured. Required.""" + outcome: "_models.FunctionShellCallOutputOutcome" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Represents either an exit outcome (with an exit code) or a timeout outcome for a shell call + output chunk. Required.""" + created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] + visibility=["read", "create", "update", "delete", "query"] + ) + """The identifier of the actor that created the item.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, + stdout: str, + stderr: str, + outcome: "_models.FunctionShellCallOutputOutcome", + created_by: Optional[str] = None, ) -> None: ... @overload @@ -7428,38 +5731,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore -class MCPApprovalResponseItemParam(ItemParam, discriminator="mcp_approval_response"): - """A response to an MCP approval request. +class FunctionShellCallOutputContentParam(_Model): + """Shell output content. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE - :ivar approval_request_id: The ID of the approval request being answered. Required. - :vartype approval_request_id: str - :ivar approve: Whether the request was approved. Required. 
- :vartype approve: bool - :ivar reason: Optional reason for the decision. - :vartype reason: str + :ivar stdout: Captured stdout output for the shell call. Required. + :vartype stdout: str + :ivar stderr: Captured stderr output for the shell call. Required. + :vartype stderr: str + :ivar outcome: The exit or timeout outcome associated with this shell call. Required. + :vartype outcome: ~azure.ai.projects.models.FunctionShellCallOutputOutcomeParam """ - type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the approval request being answered. Required.""" - approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the request was approved. Required.""" - reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional reason for the decision.""" + stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Captured stdout output for the shell call. Required.""" + stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Captured stderr output for the shell call. Required.""" + outcome: "_models.FunctionShellCallOutputOutcomeParam" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The exit or timeout outcome associated with this shell call. Required.""" @overload def __init__( self, *, - approval_request_id: str, - approve: bool, - reason: Optional[str] = None, + stdout: str, + stderr: str, + outcome: "_models.FunctionShellCallOutputOutcomeParam", ) -> None: ... 
@overload @@ -7471,44 +5771,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore -class MCPApprovalResponseItemResource(ItemResource, discriminator="mcp_approval_response"): - """A response to an MCP approval request. +class FunctionShellCallOutputOutcome(_Model): + """Shell call outcome. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE - :ivar approval_request_id: The ID of the approval request being answered. Required. - :vartype approval_request_id: str - :ivar approve: Whether the request was approved. Required. - :vartype approve: bool - :ivar reason: Optional reason for the decision. - :vartype reason: str + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FunctionShellCallOutputExitOutcome, FunctionShellCallOutputTimeoutOutcome + + :ivar type: Required. Known values are: "timeout" and "exit". + :vartype type: str or ~azure.ai.projects.models.FunctionShellCallOutputOutcomeType """ - type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the approval request being answered. Required.""" - approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the request was approved. 
Required.""" - reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional reason for the decision.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"timeout\" and \"exit\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - approval_request_id: str, - approve: bool, - created_by: Optional["_models.CreatedBy"] = None, - reason: Optional[str] = None, + type: str, ) -> None: ... @overload @@ -7520,48 +5803,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore -class MCPCallItemParam(ItemParam, discriminator="mcp_call"): - """An invocation of a tool on an MCP server. +class FunctionShellCallOutputExitOutcome(FunctionShellCallOutputOutcome, discriminator="exit"): + """Shell call exit outcome. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_CALL - :ivar server_label: The label of the MCP server running the tool. Required. - :vartype server_label: str - :ivar name: The name of the tool that was run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments passed to the tool. Required. - :vartype arguments: str - :ivar output: The output from the tool call. - :vartype output: str - :ivar error: The error from the tool call, if any. - :vartype error: str + :ivar type: The outcome type. Always ``exit``. Required. + :vartype type: str or ~azure.ai.projects.models.EXIT + :ivar exit_code: Exit code from the shell process. Required. 
+ :vartype exit_code: int """ - type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server running the tool. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool that was run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments passed to the tool. Required.""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output from the tool call.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error from the tool call, if any.""" + type: Literal[FunctionShellCallOutputOutcomeType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The outcome type. Always ``exit``. Required.""" + exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Exit code from the shell process. Required.""" @overload def __init__( self, *, - server_label: str, - name: str, - arguments: str, - output: Optional[str] = None, - error: Optional[str] = None, + exit_code: int, ) -> None: ... @overload @@ -7573,54 +5835,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_CALL # type: ignore + self.type = FunctionShellCallOutputOutcomeType.EXIT # type: ignore -class MCPCallItemResource(ItemResource, discriminator="mcp_call"): - """An invocation of a tool on an MCP server. +class FunctionShellCallOutputOutcomeParam(_Model): + """Shell call outcome. - :ivar id: Required. 
- :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_CALL - :ivar server_label: The label of the MCP server running the tool. Required. - :vartype server_label: str - :ivar name: The name of the tool that was run. Required. - :vartype name: str - :ivar arguments: A JSON string of the arguments passed to the tool. Required. - :vartype arguments: str - :ivar output: The output from the tool call. - :vartype output: str - :ivar error: The error from the tool call, if any. - :vartype error: str + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FunctionShellCallOutputExitOutcomeParam, FunctionShellCallOutputTimeoutOutcomeParam + + :ivar type: Required. Known values are: "timeout" and "exit". + :vartype type: str or ~azure.ai.projects.models.FunctionShellCallOutputOutcomeParamType """ - type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server running the tool. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool that was run. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A JSON string of the arguments passed to the tool. 
Required.""" - output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output from the tool call.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error from the tool call, if any.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"timeout\" and \"exit\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - name: str, - arguments: str, - created_by: Optional["_models.CreatedBy"] = None, - output: Optional[str] = None, - error: Optional[str] = None, + type: str, ) -> None: ... @overload @@ -7632,38 +5868,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_CALL # type: ignore -class MCPListToolsItemParam(ItemParam, discriminator="mcp_list_tools"): - """A list of tools available on an MCP server. +class FunctionShellCallOutputExitOutcomeParam(FunctionShellCallOutputOutcomeParam, discriminator="exit"): + """Shell call exit outcome. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS - :ivar server_label: The label of the MCP server. Required. - :vartype server_label: str - :ivar tools: The tools available on the server. Required. - :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] - :ivar error: Error message if the server could not list tools. - :vartype error: str + :ivar type: The outcome type. Always ``exit``. Required. + :vartype type: str or ~azure.ai.projects.models.EXIT + :ivar exit_code: The exit code returned by the shell process. Required. 
+ :vartype exit_code: int """ - type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server. Required.""" - tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The tools available on the server. Required.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error message if the server could not list tools.""" + type: Literal[FunctionShellCallOutputOutcomeParamType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The outcome type. Always ``exit``. Required.""" + exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The exit code returned by the shell process. Required.""" @overload def __init__( self, *, - server_label: str, - tools: list["_models.MCPListToolsTool"], - error: Optional[str] = None, + exit_code: int, ) -> None: ... @overload @@ -7675,44 +5900,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_LIST_TOOLS # type: ignore + self.type = FunctionShellCallOutputOutcomeParamType.EXIT # type: ignore -class MCPListToolsItemResource(ItemResource, discriminator="mcp_list_tools"): - """A list of tools available on an MCP server. +class FunctionShellCallOutputTimeoutOutcome(FunctionShellCallOutputOutcome, discriminator="timeout"): + """Shell call timeout outcome. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. 
- :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS - :ivar server_label: The label of the MCP server. Required. - :vartype server_label: str - :ivar tools: The tools available on the server. Required. - :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] - :ivar error: Error message if the server could not list tools. - :vartype error: str + :ivar type: The outcome type. Always ``timeout``. Required. + :vartype type: str or ~azure.ai.projects.models.TIMEOUT """ - type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server. Required.""" - tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The tools available on the server. Required.""" - error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error message if the server could not list tools.""" + type: Literal[FunctionShellCallOutputOutcomeType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The outcome type. Always ``timeout``. Required.""" @overload def __init__( self, - *, - id: str, # pylint: disable=redefined-builtin - server_label: str, - tools: list["_models.MCPListToolsTool"], - created_by: Optional["_models.CreatedBy"] = None, - error: Optional[str] = None, ) -> None: ... @overload @@ -7724,39 +5927,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MCP_LIST_TOOLS # type: ignore + self.type = FunctionShellCallOutputOutcomeType.TIMEOUT # type: ignore -class MCPListToolsTool(_Model): - """A tool available on an MCP server. 
+class FunctionShellCallOutputTimeoutOutcomeParam( + FunctionShellCallOutputOutcomeParam, discriminator="timeout" +): # pylint: disable=name-too-long + """Shell call timeout outcome. - :ivar name: The name of the tool. Required. - :vartype name: str - :ivar description: The description of the tool. - :vartype description: str - :ivar input_schema: The JSON schema describing the tool's input. Required. - :vartype input_schema: any - :ivar annotations: Additional annotations about the tool. - :vartype annotations: any + :ivar type: The outcome type. Always ``timeout``. Required. + :vartype type: str or ~azure.ai.projects.models.TIMEOUT """ - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The description of the tool.""" - input_schema: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The JSON schema describing the tool's input. Required.""" - annotations: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Additional annotations about the tool.""" + type: Literal[FunctionShellCallOutputOutcomeParamType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The outcome type. Always ``timeout``. Required.""" @overload def __init__( self, - *, - name: str, - input_schema: Any, - description: Optional[str] = None, - annotations: Optional[Any] = None, ) -> None: ... @overload @@ -7768,69 +5956,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = FunctionShellCallOutputOutcomeParamType.TIMEOUT # type: ignore -class MCPTool(Tool, discriminator="mcp"): - """Give the model access to additional tools via remote Model Context Protocol - (MCP) servers. 
`Learn more about MCP - `_. +class FunctionShellToolParam(Tool, discriminator="shell"): + """Shell tool. - :ivar type: The type of the MCP tool. Always ``mcp``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP - :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. - :vartype server_label: str - :ivar server_url: The URL for the MCP server. Required. - :vartype server_url: str - :ivar headers: Optional HTTP headers to send to the MCP server. Use for authentication - or other purposes. - :vartype headers: dict[str, str] - :ivar allowed_tools: List of allowed tool names or a filter object. Is either a [str] type or a - MCPToolAllowedTools1 type. - :vartype allowed_tools: list[str] or ~azure.ai.projects.models.MCPToolAllowedTools1 - :ivar require_approval: Specify which of the MCP server's tools require approval. Is one of the - following types: MCPToolRequireApproval1, Literal["always"], Literal["never"] - :vartype require_approval: ~azure.ai.projects.models.MCPToolRequireApproval1 or str or str - :ivar project_connection_id: The connection ID in the project for the MCP server. The - connection stores authentication and other connection details needed to connect to the MCP - server. - :vartype project_connection_id: str + :ivar type: The type of the shell tool. Always ``shell``. Required. + :vartype type: str or ~azure.ai.projects.models.SHELL """ - type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the MCP tool. Always ``mcp``. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A label for this MCP server, used to identify it in tool calls. Required.""" - server_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The URL for the MCP server. 
Required.""" - headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional HTTP headers to send to the MCP server. Use for authentication - or other purposes.""" - allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of allowed tool names or a filter object. Is either a [str] type or a MCPToolAllowedTools1 - type.""" - require_approval: Optional[Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]]] = ( - rest_field(visibility=["read", "create", "update", "delete", "query"]) - ) - """Specify which of the MCP server's tools require approval. Is one of the following types: - MCPToolRequireApproval1, Literal[\"always\"], Literal[\"never\"]""" - project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The connection ID in the project for the MCP server. The connection stores authentication and - other connection details needed to connect to the MCP server.""" - - @overload - def __init__( - self, - *, - server_label: str, - server_url: str, - headers: Optional[dict[str, str]] = None, - allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = None, - require_approval: Optional[ - Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]] - ] = None, - project_connection_id: Optional[str] = None, + type: Literal[ToolType.SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the shell tool. Always ``shell``. Required.""" + + @overload + def __init__( + self, ) -> None: ... 
@overload @@ -7842,24 +5983,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.MCP # type: ignore + self.type = ToolType.SHELL # type: ignore -class MCPToolAllowedTools1(_Model): - """MCPToolAllowedTools1. +class FunctionTool(Tool, discriminator="function"): + """Function. - :ivar tool_names: List of allowed tool names. - :vartype tool_names: list[str] + :ivar type: The type of the function tool. Always ``function``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + :ivar description: + :vartype description: str + :ivar parameters: Required. + :vartype parameters: dict[str, any] + :ivar strict: Required. + :vartype strict: bool """ - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of allowed tool names.""" + type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool. Always ``function``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - tool_names: Optional[list[str]] = None, + name: str, + parameters: dict[str, Any], + strict: bool, + description: Optional[str] = None, ) -> None: ... 
@overload @@ -7871,32 +6030,63 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ToolType.FUNCTION # type: ignore + +class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): + """The hosted agent definition. -class MCPToolRequireApproval1(_Model): - """MCPToolRequireApproval1. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ImageBasedHostedAgentDefinition - :ivar always: A list of tools that always require approval. - :vartype always: ~azure.ai.projects.models.MCPToolRequireApprovalAlways - :ivar never: A list of tools that never require approval. - :vartype never: ~azure.ai.projects.models.MCPToolRequireApprovalNever + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. + :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. 
+ :vartype environment_variables: dict[str, str] """ - always: Optional["_models.MCPToolRequireApprovalAlways"] = rest_field( + __mapping__: dict[str, _Model] = {} + kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the hosted agent's model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """A list of tools that always require approval.""" - never: Optional["_models.MCPToolRequireApprovalNever"] = rest_field( + """The protocols that the agent supports for ingress communication of the containers. Required.""" + cpu: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The CPU configuration for the hosted agent. Required.""" + memory: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory configuration for the hosted agent. Required.""" + environment_variables: Optional[dict[str, str]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """A list of tools that never require approval.""" + """Environment variables to set in the hosted agent container.""" @overload def __init__( self, *, - always: Optional["_models.MCPToolRequireApprovalAlways"] = None, - never: Optional["_models.MCPToolRequireApprovalNever"] = None, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -7908,23 +6098,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.kind = AgentKind.HOSTED # type: ignore -class MCPToolRequireApprovalAlways(_Model): - """MCPToolRequireApprovalAlways. +class HourlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Hourly"): + """Hourly recurrence schedule. - :ivar tool_names: List of tools that require approval. - :vartype tool_names: list[str] + :ivar type: Required. Hourly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.HOURLY """ - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of tools that require approval.""" + type: Literal[RecurrenceType.HOURLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Hourly recurrence pattern.""" @overload def __init__( self, - *, - tool_names: Optional[list[str]] = None, ) -> None: ... @overload @@ -7936,23 +6125,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = RecurrenceType.HOURLY # type: ignore -class MCPToolRequireApprovalNever(_Model): - """MCPToolRequireApprovalNever. +class HumanEvaluationRuleAction(EvaluationRuleAction, discriminator="humanEvaluation"): + """Evaluation rule action for human evaluation. - :ivar tool_names: List of tools that do not require approval. - :vartype tool_names: list[str] + :ivar type: Required. Human evaluation. + :vartype type: str or ~azure.ai.projects.models.HUMAN_EVALUATION + :ivar template_id: Human evaluation template Id. Required. 
+ :vartype template_id: str """ - tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of tools that do not require approval.""" + type: Literal[EvaluationRuleActionType.HUMAN_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Human evaluation.""" + template_id: str = rest_field(name="templateId", visibility=["read", "create", "update", "delete", "query"]) + """Human evaluation template Id. Required.""" @overload def __init__( self, *, - tool_names: Optional[list[str]] = None, + template_id: str, ) -> None: ... @overload @@ -7964,32 +6158,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = EvaluationRuleActionType.HUMAN_EVALUATION # type: ignore -class MemoryOperation(_Model): - """Represents a single memory operation (create, update, or delete) performed on a memory item. +class HybridSearchOptions(_Model): + """HybridSearchOptions. - :ivar kind: The type of memory operation being performed. Required. Known values are: "create", - "update", and "delete". - :vartype kind: str or ~azure.ai.projects.models.MemoryOperationKind - :ivar memory_item: The memory item to create, update, or delete. Required. - :vartype memory_item: ~azure.ai.projects.models.MemoryItem + :ivar embedding_weight: The weight of the embedding in the reciprocal ranking fusion. Required. + :vartype embedding_weight: float + :ivar text_weight: The weight of the text in the reciprocal ranking fusion. Required. + :vartype text_weight: float """ - kind: Union[str, "_models.MemoryOperationKind"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The type of memory operation being performed. Required. 
Known values are: \"create\", - \"update\", and \"delete\".""" - memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The memory item to create, update, or delete. Required.""" + embedding_weight: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The weight of the embedding in the reciprocal ranking fusion. Required.""" + text_weight: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The weight of the text in the reciprocal ranking fusion. Required.""" @overload def __init__( self, *, - kind: Union[str, "_models.MemoryOperationKind"], - memory_item: "_models.MemoryItem", + embedding_weight: float, + text_weight: float, ) -> None: ... @overload @@ -8003,21 +6194,44 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MemorySearchItem(_Model): - """A retrieved memory item from memory search. +class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): + """The image-based deployment definition for a hosted agent. - :ivar memory_item: Retrieved memory item. Required. - :vartype memory_item: ~azure.ai.projects.models.MemoryItem + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. 
+ :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. + :vartype environment_variables: dict[str, str] + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar image: The image for the hosted agent. Required. + :vartype image: str """ - memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Retrieved memory item. Required.""" + image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image for the hosted agent. Required.""" @overload def __init__( self, *, - memory_item: "_models.MemoryItem", + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + image: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -8031,21 +6245,108 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class MemorySearchOptions(_Model): - """Memory search options. +class ImageGenTool(Tool, discriminator="image_generation"): + """Image generation tool. - :ivar max_memories: Maximum number of memory items to return. - :vartype max_memories: int + :ivar type: The type of the image generation tool. Always ``image_generation``. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + :ivar model: Is one of the following types: Literal["gpt-image-1"], + Literal["gpt-image-1-mini"], str + :vartype model: str or str or str + :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. 
Is one of the following types: Literal["low"], + Literal["medium"], Literal["high"], Literal["auto"] + :vartype quality: str or str or str or str + :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"] + :vartype size: str or str or str or str + :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"], + Literal["jpeg"] + :vartype output_format: str or str or str + :ivar output_compression: Compression level for the output image. Default: 100. + :vartype output_compression: int + :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a + Literal["auto"] type or a Literal["low"] type. + :vartype moderation: str or str + :ivar background: Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal["transparent"], Literal["opaque"], Literal["auto"] + :vartype background: str or str or str + :ivar input_fidelity: Known values are: "high" and "low". + :vartype input_fidelity: str or ~azure.ai.projects.models.InputFidelity + :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional). + :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask + :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default + value) to 3. 
+ :vartype partial_images: int """ - max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Maximum number of memory items to return.""" + type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the image generation tool. Always ``image_generation``. Required.""" + model: Optional[Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], str]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is one of the following types: Literal[\"gpt-image-1\"], Literal[\"gpt-image-1-mini\"], str""" + quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. Is one of the following types: Literal[\"low\"], + Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" + output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. Is one of the following types: Literal[\"png\"], + Literal[\"webp\"], Literal[\"jpeg\"]""" + output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Compression level for the output image. 
Default: 100.""" + moderation: Optional[Literal["auto", "low"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type + or a Literal[\"low\"] type.""" + background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" + input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"high\" and \"low\".""" + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional mask for inpainting. 
Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional).""" + partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" @overload def __init__( self, *, - max_memories: Optional[int] = None, + model: Optional[Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], str]] = None, + quality: Optional[Literal["low", "medium", "high", "auto"]] = None, + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None, + output_format: Optional[Literal["png", "webp", "jpeg"]] = None, + output_compression: Optional[int] = None, + moderation: Optional[Literal["auto", "low"]] = None, + background: Optional[Literal["transparent", "opaque", "auto"]] = None, + input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = None, + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None, + partial_images: Optional[int] = None, ) -> None: ... @overload @@ -8057,49 +6358,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ToolType.IMAGE_GENERATION # type: ignore -class MemorySearchTool(Tool, discriminator="memory_search"): - """A tool for integrating memories into the agent. +class ImageGenToolInputImageMask(_Model): + """ImageGenToolInputImageMask. - :ivar type: The type of the tool. Always ``memory_search``. Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH - :ivar memory_store_name: The name of the memory store to use. Required. - :vartype memory_store_name: str - :ivar scope: The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. 
- :vartype scope: str - :ivar search_options: Options for searching the memory store. - :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions - :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default - 300. - :vartype update_delay: int + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str """ - type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the tool. Always ``memory_search``. Required.""" - memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store to use. Required.""" - scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The namespace used to group and isolate memories, such as a user ID. - Limits which memories can be retrieved or updated. - Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required.""" - search_options: Optional["_models.MemorySearchOptions"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Options for searching the memory store.""" - update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Time to wait before updating memories after inactivity (seconds). Default 300.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - memory_store_name: str, - scope: str, - search_options: Optional["_models.MemorySearchOptions"] = None, - update_delay: Optional[int] = None, + image_url: Optional[str] = None, + file_id: Optional[str] = None, ) -> None: ... 
@overload @@ -8111,30 +6390,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.MEMORY_SEARCH # type: ignore -class MemorySearchToolCallItemParam(ItemParam, discriminator="memory_search_call"): - """MemorySearchToolCallItemParam. +class InputContent(_Model): + """InputContent. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL - :ivar results: The results returned from the memory search. - :vartype results: list[~azure.ai.projects.models.MemorySearchItem] + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + InputContentInputFileContent, InputContentInputImageContent, InputContentInputTextContent + + :ivar type: Required. Known values are: "input_text", "input_image", and "input_file". + :vartype type: str or ~azure.ai.projects.models.InputContentType """ - type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - results: Optional[list["_models.MemorySearchItem"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results returned from the memory search.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"input_text\", \"input_image\", and \"input_file\".""" @overload def __init__( self, *, - results: Optional[list["_models.MemorySearchItem"]] = None, + type: str, ) -> None: ... 
@overload @@ -8146,49 +6422,41 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore -class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"): - """MemorySearchToolCallItemResource. +class InputContentInputFileContent(InputContent, discriminator="input_file"): + """Input file. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL - :ivar status: The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following - types: Literal["in_progress"], Literal["searching"], Literal["completed"], - Literal["incomplete"], Literal["failed"] - :vartype status: str or str or str or str or str - :ivar results: The results returned from the memory search. - :vartype results: list[~azure.ai.projects.models.MemorySearchItem] + :ivar type: The type of the input item. Always ``input_file``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_FILE + :ivar file_id: + :vartype file_id: str + :ivar filename: The name of the file to be sent to the model. + :vartype filename: str + :ivar file_url: The URL of the file to be sent to the model. + :vartype file_url: str + :ivar file_data: The content of the file to be sent to the model. 
+ :vartype file_data: str """ - type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the memory search tool call. One of ``in_progress``, - ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], - Literal[\"incomplete\"], Literal[\"failed\"]""" - results: Optional[list["_models.MemorySearchItem"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The results returned from the memory search.""" + type: Literal[InputContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_file``. 
Required.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file to be sent to the model.""" + file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the file to be sent to the model.""" + file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the file to be sent to the model.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], - created_by: Optional["_models.CreatedBy"] = None, - results: Optional[list["_models.MemorySearchItem"]] = None, + file_id: Optional[str] = None, + filename: Optional[str] = None, + file_url: Optional[str] = None, + file_data: Optional[str] = None, ) -> None: ... @overload @@ -8200,28 +6468,38 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore + self.type = InputContentType.INPUT_FILE # type: ignore -class MemoryStoreDefinition(_Model): - """Base definition for memory store configurations. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - MemoryStoreDefaultDefinition +class InputContentInputImageContent(InputContent, discriminator="input_image"): + """Input image. - :ivar kind: The kind of the memory store. Required. "default" - :vartype kind: str or ~azure.ai.projects.models.MemoryStoreKind + :ivar type: The type of the input item. Always ``input_image``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str + :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, + or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto". + :vartype detail: str or ~azure.ai.projects.models.ImageDetail """ - __mapping__: dict[str, _Model] = {} - kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) - """The kind of the memory store. Required. \"default\"""" + type: Literal[InputContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_image``. Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. + Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\".""" @overload def __init__( self, *, - kind: str, + detail: Union[str, "_models.ImageDetail"], + image_url: Optional[str] = None, + file_id: Optional[str] = None, ) -> None: ... @overload @@ -8233,40 +6511,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputContentType.INPUT_IMAGE # type: ignore -class MemoryStoreDefaultDefinition(MemoryStoreDefinition, discriminator="default"): - """Default memory store implementation. +class InputContentInputTextContent(InputContent, discriminator="input_text"): + """Input text. 
- :ivar kind: The kind of the memory store. Required. The default memory store implementation. - :vartype kind: str or ~azure.ai.projects.models.DEFAULT - :ivar chat_model: The name or identifier of the chat completion model deployment used for - memory processing. Required. - :vartype chat_model: str - :ivar embedding_model: The name or identifier of the embedding model deployment used for memory - processing. Required. - :vartype embedding_model: str - :ivar options: Default memory store options. - :vartype options: ~azure.ai.projects.models.MemoryStoreDefaultOptions + :ivar type: The type of the input item. Always ``input_text``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT + :ivar text: The text input to the model. Required. + :vartype text: str """ - kind: Literal[MemoryStoreKind.DEFAULT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The kind of the memory store. Required. The default memory store implementation.""" - chat_model: str = rest_field(visibility=["read", "create"]) - """The name or identifier of the chat completion model deployment used for memory processing. - Required.""" - embedding_model: str = rest_field(visibility=["read", "create"]) - """The name or identifier of the embedding model deployment used for memory processing. Required.""" - options: Optional["_models.MemoryStoreDefaultOptions"] = rest_field(visibility=["read", "create"]) - """Default memory store options.""" + type: Literal[InputContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text input to the model. 
Required.""" @overload def __init__( self, *, - chat_model: str, - embedding_model: str, - options: Optional["_models.MemoryStoreDefaultOptions"] = None, + text: str, ) -> None: ... @overload @@ -8278,37 +6544,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.kind = MemoryStoreKind.DEFAULT # type: ignore + self.type = InputContentType.INPUT_TEXT # type: ignore -class MemoryStoreDefaultOptions(_Model): - """Default memory store configurations. +class InputFileContentParam(_Model): + """Input file. - :ivar user_profile_enabled: Whether to enable user profile extraction and storage. Default is - true. Required. - :vartype user_profile_enabled: bool - :ivar user_profile_details: Specific categories or types of user profile information to extract - and store. - :vartype user_profile_details: str - :ivar chat_summary_enabled: Whether to enable chat summary extraction and storage. Default is - true. Required. - :vartype chat_summary_enabled: bool + :ivar type: The type of the input item. Always ``input_file``. Required. Default value is + "input_file". + :vartype type: str + :ivar file_id: + :vartype file_id: str + :ivar filename: + :vartype filename: str + :ivar file_data: + :vartype file_data: str + :ivar file_url: + :vartype file_url: str """ - user_profile_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable user profile extraction and storage. Default is true. Required.""" - user_profile_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Specific categories or types of user profile information to extract and store.""" - chat_summary_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable chat summary extraction and storage. Default is true. 
Required.""" + type: Literal["input_file"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the input item. Always ``input_file``. Required. Default value is \"input_file\".""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - user_profile_enabled: bool, - chat_summary_enabled: bool, - user_profile_details: Optional[str] = None, + file_id: Optional[str] = None, + filename: Optional[str] = None, + file_data: Optional[str] = None, + file_url: Optional[str] = None, ) -> None: ... @overload @@ -8320,41 +6589,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type: Literal["input_file"] = "input_file" -class MemoryStoreDeleteScopeResult(_Model): - """Response for deleting memories from a scope. +class InputImageContentParamAutoParam(_Model): + """Input image. - :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is - "memory_store.scope.deleted". - :vartype object: str - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar scope: The scope from which memories were deleted. Required. - :vartype scope: str - :ivar deleted: Whether the deletion operation was successful. Required. - :vartype deleted: bool + :ivar type: The type of the input item. Always ``input_image``. Required. Default value is + "input_image". + :vartype type: str + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str + :ivar detail: Known values are: "low", "high", and "auto". 
+ :vartype detail: str or ~azure.ai.projects.models.DetailEnum """ - object: Literal["memory_store.scope.deleted"] = rest_field( + type: Literal["input_image"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the input item. Always ``input_image``. Required. Default value is \"input_image\".""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + detail: Optional[Union[str, "_models.DetailEnum"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The object type. Always 'memory_store.scope.deleted'. Required. Default value is - \"memory_store.scope.deleted\".""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. Required.""" - scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The scope from which memories were deleted. Required.""" - deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether the deletion operation was successful. Required.""" + """Known values are: \"low\", \"high\", and \"auto\".""" @overload def __init__( self, *, - name: str, - scope: str, - deleted: bool, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + detail: Optional[Union[str, "_models.DetailEnum"]] = None, ) -> None: ... @overload @@ -8366,64 +6633,49 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" + self.type: Literal["input_image"] = "input_image" -class MemoryStoreDetails(_Model): - """A memory store that can store and retrieve user memories. 
+class InputItemApplyPatchToolCallItemParam(InputItem, discriminator="apply_patch_call"): + """Apply patch tool call. - :ivar object: The object type, which is always 'memory_store'. Required. Default value is - "memory_store". - :vartype object: str - :ivar id: The unique identifier of the memory store. Required. + :ivar type: The type of the item. Always ``apply_patch_call``. Required. + :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL + :ivar id: :vartype id: str - :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. - :vartype created_at: ~datetime.datetime - :ivar updated_at: The Unix timestamp (seconds) when the memory store was last updated. - Required. - :vartype updated_at: ~datetime.datetime - :ivar name: The name of the memory store. Required. - :vartype name: str - :ivar description: A human-readable description of the memory store. - :vartype description: str - :ivar metadata: Arbitrary key-value metadata to associate with the memory store. - :vartype metadata: dict[str, str] - :ivar definition: The definition of the memory store. Required. - :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition + :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. + :vartype call_id: str + :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``. + Required. Known values are: "in_progress" and "completed". + :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallStatusParam + :ivar operation: The specific create, delete, or update instruction for the apply_patch tool + call. Required. + :vartype operation: ~azure.ai.projects.models.ApplyPatchOperationParam """ - object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type, which is always 'memory_store'. Required. 
Default value is \"memory_store\".""" - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the memory store. Required.""" - created_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + type: Literal[InputItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``apply_patch_call``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call generated by the model. Required.""" + status: Union[str, "_models.ApplyPatchCallStatusParam"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """The Unix timestamp (seconds) when the memory store was created. Required.""" - updated_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required. + Known values are: \"in_progress\" and \"completed\".""" + operation: "_models.ApplyPatchOperationParam" = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """The Unix timestamp (seconds) when the memory store was last updated. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the memory store. 
Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the memory store.""" - metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Arbitrary key-value metadata to associate with the memory store.""" - definition: "_models.MemoryStoreDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The definition of the memory store. Required.""" + """The specific create, delete, or update instruction for the apply_patch tool call. Required.""" @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - created_at: datetime.datetime, - updated_at: datetime.datetime, - name: str, - definition: "_models.MemoryStoreDefinition", - description: Optional[str] = None, - metadata: Optional[dict[str, str]] = None, + def __init__( + self, + *, + call_id: str, + status: Union[str, "_models.ApplyPatchCallStatusParam"], + operation: "_models.ApplyPatchOperationParam", + id: Optional[str] = None, # pylint: disable=redefined-builtin ) -> None: ... @overload @@ -8435,55 +6687,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["memory_store"] = "memory_store" + self.type = InputItemType.APPLY_PATCH_CALL # type: ignore -class MemoryStoreOperationUsage(_Model): - """Usage statistics of a memory store operation. +class InputItemApplyPatchToolCallOutputItemParam( + InputItem, discriminator="apply_patch_call_output" +): # pylint: disable=name-too-long + """Apply patch tool call output. - :ivar embedding_tokens: The number of embedding tokens. Required. - :vartype embedding_tokens: int - :ivar input_tokens: The number of input tokens. Required. - :vartype input_tokens: int - :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. 
- :vartype input_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails - :ivar output_tokens: The number of output tokens. Required. - :vartype output_tokens: int - :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. - :vartype output_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails - :ivar total_tokens: The total number of tokens used. Required. - :vartype total_tokens: int + :ivar type: The type of the item. Always ``apply_patch_call_output``. Required. + :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL_OUTPUT + :ivar id: + :vartype id: str + :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. + :vartype call_id: str + :ivar status: The status of the apply patch tool call output. One of ``completed`` or + ``failed``. Required. Known values are: "completed" and "failed". + :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallOutputStatusParam + :ivar output: + :vartype output: str """ - embedding_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of embedding tokens. Required.""" - input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of input tokens. Required.""" - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the input tokens. Required.""" - output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of output tokens. Required.""" - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( + type: Literal[InputItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. 
Always ``apply_patch_call_output``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call generated by the model. Required.""" + status: Union[str, "_models.ApplyPatchCallOutputStatusParam"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """A detailed breakdown of the output tokens. Required.""" - total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The total number of tokens used. Required.""" + """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required. + Known values are: \"completed\" and \"failed\".""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - embedding_tokens: int, - input_tokens: int, - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", - output_tokens: int, - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", - total_tokens: int, + call_id: str, + status: Union[str, "_models.ApplyPatchCallOutputStatusParam"], + id: Optional[str] = None, # pylint: disable=redefined-builtin + output: Optional[str] = None, ) -> None: ... @overload @@ -8495,25 +6739,60 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.APPLY_PATCH_CALL_OUTPUT # type: ignore -class MemoryStoreOperationUsageInputTokensDetails(_Model): # pylint: disable=name-too-long - """MemoryStoreOperationUsageInputTokensDetails. +class InputItemCodeInterpreterToolCall(InputItem, discriminator="code_interpreter_call"): + """Code interpreter tool call. - :ivar cached_tokens: The number of tokens that were retrieved from the cache. 
- `More on prompt caching `_. Required. - :vartype cached_tokens: int + :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``. + Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar id: The unique ID of the code interpreter tool call. Required. + :vartype id: str + :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``, + ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the + following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"], + Literal["interpreting"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: Required. + :vartype code: str + :ivar outputs: Required. + :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutputLogs or + ~azure.ai.projects.models.CodeInterpreterOutputImage] """ - cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of tokens that were retrieved from the cache. - `More on prompt caching `_. Required.""" + type: Literal[InputItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the code interpreter tool call. Required.""" + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``, + ``incomplete``, ``interpreting``, and ``failed``. Required. 
Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"], + Literal[\"interpreting\"], Literal[\"failed\"]""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" @overload def __init__( self, *, - cached_tokens: int, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], + container_id: str, + code: str, + outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]], ) -> None: ... @overload @@ -8525,23 +6804,32 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.CODE_INTERPRETER_CALL # type: ignore -class MemoryStoreOperationUsageOutputTokensDetails(_Model): # pylint: disable=name-too-long - """MemoryStoreOperationUsageOutputTokensDetails. +class InputItemCompactionSummaryItemParam(InputItem, discriminator="compaction"): + """Compaction item. - :ivar reasoning_tokens: The number of reasoning tokens. Required. - :vartype reasoning_tokens: int + :ivar id: + :vartype id: str + :ivar type: The type of the item. Always ``compaction``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPACTION + :ivar encrypted_content: The encrypted content of the compaction summary. Required. + :vartype encrypted_content: str """ - reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of reasoning tokens. 
Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + type: Literal[InputItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``compaction``. Required.""" + encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encrypted content of the compaction summary. Required.""" @overload def __init__( self, *, - reasoning_tokens: int, + encrypted_content: str, + id: Optional[str] = None, # pylint: disable=redefined-builtin ) -> None: ... @overload @@ -8553,35 +6841,52 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.COMPACTION # type: ignore -class MemoryStoreSearchResult(_Model): - """Memory search response. +class InputItemComputerCallOutputItemParam(InputItem, discriminator="computer_call_output"): + """Computer tool call output. - :ivar search_id: The unique ID of this search request. Use this value as previous_search_id in - subsequent requests to perform incremental searches. Required. - :vartype search_id: str - :ivar memories: Related memory items found during the search operation. Required. - :vartype memories: list[~azure.ai.projects.models.MemorySearchItem] - :ivar usage: Usage statistics associated with the memory search operation. Required. - :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + :ivar id: + :vartype id: str + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar type: The type of the computer tool call output. Always ``computer_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar output: Required. 
+ :vartype output: ~azure.ai.projects.models.ComputerScreenshotImage + :ivar acknowledged_safety_checks: + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] + :ivar status: Known values are: "in_progress", "completed", and "incomplete". + :vartype status: str or ~azure.ai.projects.models.FunctionCallItemStatus """ - search_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of this search request. Use this value as previous_search_id in subsequent - requests to perform incremental searches. Required.""" - memories: list["_models.MemorySearchItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Related memory items found during the search operation. Required.""" - usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Usage statistics associated with the memory search operation. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + type: Literal[InputItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer tool call output. Always ``computer_call_output``. 
Required.""" + output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"in_progress\", \"completed\", and \"incomplete\".""" @overload def __init__( self, *, - search_id: str, - memories: list["_models.MemorySearchItem"], - usage: "_models.MemoryStoreOperationUsage", + call_id: str, + output: "_models.ComputerScreenshotImage", + id: Optional[str] = None, # pylint: disable=redefined-builtin + acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None, + status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None, ) -> None: ... @overload @@ -8593,31 +6898,56 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.COMPUTER_CALL_OUTPUT # type: ignore -class MemoryStoreUpdateCompletedResult(_Model): - """Memory update result. +class InputItemComputerToolCall(InputItem, discriminator="computer_call"): + """Computer tool call. - :ivar memory_operations: A list of individual memory operations that were performed during the - update. Required. - :vartype memory_operations: list[~azure.ai.projects.models.MemoryOperation] - :ivar usage: Usage statistics associated with the memory update operation. Required. - :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + :ivar type: The type of the computer call. Always ``computer_call``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar id: The unique ID of the computer call. Required. 
+ :vartype id: str + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. + :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - memory_operations: list["_models.MemoryOperation"] = rest_field( + type: Literal[InputItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer call. Always ``computer_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the computer call. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """A list of individual memory operations that were performed during the update. Required.""" - usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Usage statistics associated with the memory update operation. Required.""" + """The pending safety checks for the computer call. 
Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - memory_operations: list["_models.MemoryOperation"], - usage: "_models.MemoryStoreOperationUsage", + id: str, # pylint: disable=redefined-builtin + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"], + status: Literal["in_progress", "completed", "incomplete"], ) -> None: ... @overload @@ -8629,53 +6959,43 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.COMPUTER_CALL # type: ignore -class MemoryStoreUpdateResult(_Model): - """Provides the status of a memory store update operation. +class InputItemCustomToolCall(InputItem, discriminator="custom_tool_call"): + """Custom tool call. - :ivar update_id: The unique ID of this update request. Use this value as previous_update_id in - subsequent requests to perform incremental updates. Required. - :vartype update_id: str - :ivar status: The status of the memory update operation. One of "queued", "in_progress", - "completed", "failed", or "superseded". Required. Known values are: "queued", "in_progress", - "completed", "failed", and "superseded". - :vartype status: str or ~azure.ai.projects.models.MemoryStoreUpdateStatus - :ivar superseded_by: The update_id the operation was superseded by when status is "superseded". - :vartype superseded_by: str - :ivar result: The result of memory store update operation when status is "completed". 
- :vartype result: ~azure.ai.projects.models.MemoryStoreUpdateCompletedResult - :ivar error: Error object that describes the error when status is "failed". - :vartype error: ~azure.ai.projects.models.Error + :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required. + :vartype type: str or ~azure.ai.projects.models.CUSTOM_TOOL_CALL + :ivar id: The unique ID of the custom tool call in the OpenAI platform. + :vartype id: str + :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required. + :vartype call_id: str + :ivar name: The name of the custom tool being called. Required. + :vartype name: str + :ivar input: The input for the custom tool call generated by the model. Required. + :vartype input: str """ - update_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of this update request. Use this value as previous_update_id in subsequent - requests to perform incremental updates. Required.""" - status: Union[str, "_models.MemoryStoreUpdateStatus"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the memory update operation. One of \"queued\", \"in_progress\", \"completed\", - \"failed\", or \"superseded\". Required. 
Known values are: \"queued\", \"in_progress\", - \"completed\", \"failed\", and \"superseded\".""" - superseded_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The update_id the operation was superseded by when status is \"superseded\".""" - result: Optional["_models.MemoryStoreUpdateCompletedResult"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The result of memory store update operation when status is \"completed\".""" - error: Optional["_models.Error"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Error object that describes the error when status is \"failed\".""" + type: Literal[InputItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the custom tool call. Always ``custom_tool_call``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the custom tool call in the OpenAI platform.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used to map this custom tool call to a tool call output. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the custom tool being called. Required.""" + input: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The input for the custom tool call generated by the model. Required.""" @overload def __init__( self, *, - update_id: str, - status: Union[str, "_models.MemoryStoreUpdateStatus"], - superseded_by: Optional[str] = None, - result: Optional["_models.MemoryStoreUpdateCompletedResult"] = None, - error: Optional["_models.Error"] = None, + call_id: str, + name: str, + input: str, + id: Optional[str] = None, # pylint: disable=redefined-builtin ) -> None: ... 
@overload @@ -8687,29 +7007,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.CUSTOM_TOOL_CALL # type: ignore -class MicrosoftFabricAgentTool(Tool, discriminator="fabric_dataagent_preview"): - """The input definition information for a Microsoft Fabric tool as used to configure an agent. +class InputItemCustomToolCallOutput(InputItem, discriminator="custom_tool_call_output"): + """Custom tool call output. - :ivar type: The object type, which is always 'fabric_dataagent'. Required. - :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW - :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. - :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters + :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.CUSTOM_TOOL_CALL_OUTPUT + :ivar id: The unique ID of the custom tool call output in the OpenAI platform. + :vartype id: str + :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call. + Required. + :vartype call_id: str + :ivar output: The output from the custom tool call generated by your code. + Can be a string or an list of output content. Required. Is either a str type or a + [FunctionAndCustomToolCallOutput] type. + :vartype output: str or list[~azure.ai.projects.models.FunctionAndCustomToolCallOutput] """ - type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'fabric_dataagent'. 
Required.""" - fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( + type: Literal[InputItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the custom tool call output in the OpenAI platform.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The call ID, used to map this custom tool call output to a custom tool call. Required.""" + output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The fabric data agent tool parameters. Required.""" + """The output from the custom tool call generated by your code. + Can be a string or an list of output content. Required. Is either a str type or a + [FunctionAndCustomToolCallOutput] type.""" @overload def __init__( self, *, - fabric_dataagent_preview: "_models.FabricDataAgentToolParameters", + call_id: str, + output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]], + id: Optional[str] = None, # pylint: disable=redefined-builtin ) -> None: ... @overload @@ -8721,48 +7058,126 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore + self.type = InputItemType.CUSTOM_TOOL_CALL_OUTPUT # type: ignore -class ModelDeployment(Deployment, discriminator="ModelDeployment"): - """Model Deployment Definition. +class InputItemFileSearchToolCall(InputItem, discriminator="file_search_call"): + """File search tool call. - :ivar name: Name of the deployment. Required. 
- :vartype name: str - :ivar type: The type of the deployment. Required. Model deployment - :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT - :ivar model_name: Publisher-specific name of the deployed model. Required. - :vartype model_name: str - :ivar model_version: Publisher-specific version of the deployed model. Required. - :vartype model_version: str - :ivar model_publisher: Name of the deployed model's publisher. Required. - :vartype model_publisher: str - :ivar capabilities: Capabilities of deployed model. Required. - :vartype capabilities: dict[str, str] - :ivar sku: Sku of the model deployment. Required. - :vartype sku: ~azure.ai.projects.models.ModelDeploymentSku - :ivar connection_name: Name of the connection the deployment comes from. - :vartype connection_name: str + :ivar id: The unique ID of the file search tool call. Required. + :vartype id: str + :ivar type: The type of the file search tool call. Always ``file_search_call``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar status: The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], + Literal["failed"] + :vartype status: str or str or str or str or str + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallResults] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the file search tool call. Required.""" + type: Literal[InputItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file search tool call. Always ``file_search_call``. 
Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + queries: list[str], + results: Optional[list["_models.FileSearchToolCallResults"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InputItemType.FILE_SEARCH_CALL # type: ignore + + +class InputItemFunctionCallOutputItemParam(InputItem, discriminator="function_call_output"): + """Function tool call output. + + :ivar id: + :vartype id: str + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar type: The type of the function tool call output. Always ``function_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar output: Text, image, or file output of the function tool call. Required. 
Is either a str + type or a [Union["_models.InputTextContentParam", "_models.InputImageContentParamAutoParam", + "_models.InputFileContentParam"]] type. + :vartype output: str or list[~azure.ai.projects.models.InputTextContentParam or + ~azure.ai.projects.models.InputImageContentParamAutoParam or + ~azure.ai.projects.models.InputFileContentParam] + :ivar status: Known values are: "in_progress", "completed", and "incomplete". + :vartype status: str or ~azure.ai.projects.models.FunctionCallItemStatus """ - type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the deployment. Required. Model deployment""" - model_name: str = rest_field(name="modelName", visibility=["read"]) - """Publisher-specific name of the deployed model. Required.""" - model_version: str = rest_field(name="modelVersion", visibility=["read"]) - """Publisher-specific version of the deployed model. Required.""" - model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) - """Name of the deployed model's publisher. Required.""" - capabilities: dict[str, str] = rest_field(visibility=["read"]) - """Capabilities of deployed model. Required.""" - sku: "_models.ModelDeploymentSku" = rest_field(visibility=["read"]) - """Sku of the model deployment. Required.""" - connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) - """Name of the connection the deployment comes from.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + type: Literal[InputItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool call output. 
Always ``function_call_output``. Required.""" + output: Union[ + str, + list[ + Union[ + "_models.InputTextContentParam", + "_models.InputImageContentParamAutoParam", + "_models.InputFileContentParam", + ] + ], + ] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Text, image, or file output of the function tool call. Required. Is either a str type or a + [Union[\"_models.InputTextContentParam\", \"_models.InputImageContentParamAutoParam\", + \"_models.InputFileContentParam\"]] type.""" + status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"in_progress\", \"completed\", and \"incomplete\".""" @overload def __init__( self, + *, + call_id: str, + output: Union[ + str, + list[ + Union[ + "_models.InputTextContentParam", + "_models.InputImageContentParamAutoParam", + "_models.InputFileContentParam", + ] + ], + ], + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None, ) -> None: ... @overload @@ -8774,44 +7189,44 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = DeploymentType.MODEL_DEPLOYMENT # type: ignore + self.type = InputItemType.FUNCTION_CALL_OUTPUT # type: ignore -class ModelDeploymentSku(_Model): - """Sku information. +class InputItemFunctionShellCallItemParam(InputItem, discriminator="shell_call"): + """Shell tool call. - :ivar capacity: Sku capacity. Required. - :vartype capacity: int - :ivar family: Sku family. Required. - :vartype family: str - :ivar name: Sku name. Required. - :vartype name: str - :ivar size: Sku size. Required. - :vartype size: str - :ivar tier: Sku tier. Required. - :vartype tier: str + :ivar id: + :vartype id: str + :ivar call_id: The unique ID of the shell tool call generated by the model. Required. 
+ :vartype call_id: str + :ivar type: The type of the item. Always ``shell_call``. Required. + :vartype type: str or ~azure.ai.projects.models.SHELL_CALL + :ivar action: The shell commands and limits that describe how to run the tool call. Required. + :vartype action: ~azure.ai.projects.models.FunctionShellActionParam + :ivar status: Known values are: "in_progress", "completed", and "incomplete". + :vartype status: str or ~azure.ai.projects.models.FunctionShellCallItemStatus """ - capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku capacity. Required.""" - family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku family. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku name. Required.""" - size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku size. Required.""" - tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Sku tier. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the shell tool call generated by the model. Required.""" + type: Literal[InputItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``shell_call``. Required.""" + action: "_models.FunctionShellActionParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The shell commands and limits that describe how to run the tool call. 
Required.""" + status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"in_progress\", \"completed\", and \"incomplete\".""" @overload def __init__( self, *, - capacity: int, - family: str, - name: str, - size: str, - tier: str, + call_id: str, + action: "_models.FunctionShellActionParam", + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = None, ) -> None: ... @overload @@ -8823,29 +7238,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.SHELL_CALL # type: ignore -class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): - """Monthly recurrence schedule. +class InputItemFunctionShellCallOutputItemParam( + InputItem, discriminator="shell_call_output" +): # pylint: disable=name-too-long + """Shell tool call output. - :ivar type: Monthly recurrence type. Required. Monthly recurrence pattern. - :vartype type: str or ~azure.ai.projects.models.MONTHLY - :ivar days_of_month: Days of the month for the recurrence schedule. Required. - :vartype days_of_month: list[int] + :ivar id: + :vartype id: str + :ivar call_id: The unique ID of the shell tool call generated by the model. Required. + :vartype call_id: str + :ivar type: The type of the item. Always ``shell_call_output``. Required. + :vartype type: str or ~azure.ai.projects.models.SHELL_CALL_OUTPUT + :ivar output: Captured chunks of stdout and stderr output, along with their associated + outcomes. Required. 
+ :vartype output: list[~azure.ai.projects.models.FunctionShellCallOutputContentParam] + :ivar max_output_length: + :vartype max_output_length: int """ - type: Literal[RecurrenceType.MONTHLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Monthly recurrence type. Required. Monthly recurrence pattern.""" - days_of_month: list[int] = rest_field( - name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the shell tool call generated by the model. Required.""" + type: Literal[InputItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``shell_call_output``. Required.""" + output: list["_models.FunctionShellCallOutputContentParam"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """Days of the month for the recurrence schedule. Required.""" + """Captured chunks of stdout and stderr output, along with their associated outcomes. Required.""" + max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - days_of_month: list[int], + call_id: str, + output: list["_models.FunctionShellCallOutputContentParam"], + id: Optional[str] = None, # pylint: disable=redefined-builtin + max_output_length: Optional[int] = None, ) -> None: ... 
@overload @@ -8857,22 +7289,54 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = RecurrenceType.MONTHLY # type: ignore + self.type = InputItemType.SHELL_CALL_OUTPUT # type: ignore -class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): - """Credentials that do not require authentication. +class InputItemFunctionToolCall(InputItem, discriminator="function_call"): + """Function tool call. - :ivar type: The credential type. Required. No credential - :vartype type: str or ~azure.ai.projects.models.NONE + :ivar id: The unique ID of the function tool call. + :vartype id: str + :ivar type: The type of the function tool call. Always ``function_call``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. + :vartype arguments: str + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore - """The credential type. Required. No credential""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call.""" + type: Literal[InputItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool call. Always ``function_call``. 
Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, + *, + call_id: str, + name: str, + arguments: str, + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -8884,39 +7348,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = CredentialType.NONE # type: ignore + self.type = InputItemType.FUNCTION_CALL # type: ignore -class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): - """Request from the service for the user to perform OAuth consent. +class InputItemImageGenToolCall(InputItem, discriminator="image_generation_call"): + """Image generation call. - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar id: Required. + :ivar type: The type of the image generation call. Always ``image_generation_call``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar id: The unique ID of the image generation call. Required. :vartype id: str - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST - :ivar consent_link: The link the user can use to perform OAuth consent. Required. - :vartype consent_link: str - :ivar server_label: The server label for the OAuth consent request. Required. - :vartype server_label: str + :ivar status: The status of the image generation call. Required. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"] + :vartype status: str or str or str or str + :ivar result: Required. + :vartype result: str """ - type: Literal[ItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + type: Literal[InputItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the image generation call. Always ``image_generation_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the image generation call. Required.""" + status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the image generation call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" - consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The link the user can use to perform OAuth consent. 
Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The server label for the OAuth consent request. Required.""" @overload def __init__( self, *, id: str, # pylint: disable=redefined-builtin - consent_link: str, - server_label: str, - created_by: Optional["_models.CreatedBy"] = None, + status: Literal["in_progress", "completed", "generating", "failed"], + result: str, ) -> None: ... @overload @@ -8928,33 +7395,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.OAUTH_CONSENT_REQUEST # type: ignore + self.type = InputItemType.IMAGE_GENERATION_CALL # type: ignore -class OneTimeTrigger(Trigger, discriminator="OneTime"): - """One-time trigger. +class InputItemLocalShellToolCall(InputItem, discriminator="local_shell_call"): + """Local shell call. - :ivar type: Required. One-time trigger. - :vartype type: str or ~azure.ai.projects.models.ONE_TIME - :ivar trigger_at: Date and time for the one-time trigger in ISO 8601 format. Required. - :vartype trigger_at: str - :ivar time_zone: Time zone for the one-time trigger. - :vartype time_zone: str + :ivar type: The type of the local shell call. Always ``local_shell_call``. Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar id: The unique ID of the local shell call. Required. + :vartype id: str + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.LocalShellExecAction + :ivar status: The status of the local shell call. Required. 
Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[TriggerType.ONE_TIME] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. One-time trigger.""" - trigger_at: str = rest_field(name="triggerAt", visibility=["read", "create", "update", "delete", "query"]) - """Date and time for the one-time trigger in ISO 8601 format. Required.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the one-time trigger.""" + type: Literal[InputItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell call. Always ``local_shell_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell call. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the local shell call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - trigger_at: str, - time_zone: Optional[str] = None, + id: str, # pylint: disable=redefined-builtin + call_id: str, + action: "_models.LocalShellExecAction", + status: Literal["in_progress", "completed", "incomplete"], ) -> None: ... 
@overload @@ -8966,30 +7447,43 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = TriggerType.ONE_TIME # type: ignore + self.type = InputItemType.LOCAL_SHELL_CALL # type: ignore -class OpenApiAgentTool(Tool, discriminator="openapi"): - """The input definition information for an OpenAPI tool as used to configure an agent. +class InputItemLocalShellToolCallOutput(InputItem, discriminator="local_shell_call_output"): + """Local shell call output. - :ivar type: The object type, which is always 'openapi'. Required. - :vartype type: str or ~azure.ai.projects.models.OPENAPI - :ivar openapi: The openapi function definition. Required. - :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition + :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar id: The unique ID of the local shell tool call generated by the model. Required. + :vartype id: str + :ivar output: A JSON string of the output of the local shell tool call. Required. + :vartype output: str + :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"], + Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'openapi'. Required.""" - openapi: "_models.OpenApiFunctionDefinition" = rest_field( + type: Literal[InputItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell tool call output. Always ``local_shell_call_output``. 
Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The openapi function definition. Required.""" + """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" @overload def __init__( self, *, - openapi: "_models.OpenApiFunctionDefinition", + id: str, # pylint: disable=redefined-builtin + output: str, + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -9001,30 +7495,43 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.OPENAPI # type: ignore - + self.type = InputItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore -class OpenApiAuthDetails(_Model): - """authentication details for OpenApiFunctionDefinition. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails +class InputItemMcpApprovalRequest(InputItem, discriminator="mcp_approval_request"): + """MCP approval request. - :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity. - Required. Known values are: "anonymous", "project_connection", and "managed_identity". - :vartype type: str or ~azure.ai.projects.models.OpenApiAuthType + :ivar type: The type of the item. Always ``mcp_approval_request``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar id: The unique ID of the approval request. Required. + :vartype id: str + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. + :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """The type of authentication, must be anonymous/project_connection/managed_identity. Required. - Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\".""" + type: Literal[InputItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_approval_request``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the approval request. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" @overload def __init__( self, *, - type: str, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, ) -> None: ... 
@overload @@ -9036,21 +7543,41 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.MCP_APPROVAL_REQUEST # type: ignore -class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): - """Security details for OpenApi anonymous authentication. +class InputItemMcpApprovalResponse(InputItem, discriminator="mcp_approval_response"): + """MCP approval response. - :ivar type: The object type, which is always 'anonymous'. Required. - :vartype type: str or ~azure.ai.projects.models.ANONYMOUS + :ivar type: The type of the item. Always ``mcp_approval_response``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar id: + :vartype id: str + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: + :vartype reason: str """ - type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'anonymous'. Required.""" + type: Literal[InputItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_approval_response``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. 
Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, + *, + approval_request_id: str, + approve: bool, + id: Optional[str] = None, # pylint: disable=redefined-builtin + reason: Optional[str] = None, ) -> None: ... @overload @@ -9062,50 +7589,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.ANONYMOUS # type: ignore + self.type = InputItemType.MCP_APPROVAL_RESPONSE # type: ignore -class OpenApiFunctionDefinition(_Model): - """The input definition information for an openapi function. +class InputItemMcpListTools(InputItem, discriminator="mcp_list_tools"): + """MCP list tools. - :ivar name: The name of the function to be called. Required. - :vartype name: str - :ivar description: A description of what the function does, used by the model to choose when - and how to call the function. - :vartype description: str - :ivar spec: The openapi function shape, described as a JSON Schema object. Required. - :vartype spec: any - :ivar auth: Open API authentication details. Required. - :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails - :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. - :vartype default_params: list[str] - :ivar functions: List of function definitions used by OpenApi tool. - :vartype functions: list[~azure.ai.projects.models.OpenApiFunctionDefinitionFunction] + :ivar type: The type of the item. Always ``mcp_list_tools``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar id: The unique ID of the list. Required. + :vartype id: str + :ivar server_label: The label of the MCP server. Required. + :vartype server_label: str + :ivar tools: The tools available on the server. Required. 
+ :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: + :vartype error: str """ - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to be called. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the function does, used by the model to choose when and how to call the - function.""" - spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The openapi function shape, described as a JSON Schema object. Required.""" - auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Open API authentication details. Required.""" - default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of OpenAPI spec parameters that will use user-provided defaults.""" - functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field(visibility=["read"]) - """List of function definitions used by OpenApi tool.""" + type: Literal[InputItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_list_tools``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the list. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. 
Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - name: str, - spec: Any, - auth: "_models.OpenApiAuthDetails", - description: Optional[str] = None, - default_params: Optional[list[str]] = None, + id: str, # pylint: disable=redefined-builtin + server_label: str, + tools: list["_models.MCPListToolsTool"], + error: Optional[str] = None, ) -> None: ... @overload @@ -9117,36 +7636,66 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.MCP_LIST_TOOLS # type: ignore -class OpenApiFunctionDefinitionFunction(_Model): - """OpenApiFunctionDefinitionFunction. +class InputItemMcpToolCall(InputItem, discriminator="mcp_call"): + """MCP tool call. - :ivar name: The name of the function to be called. Required. + :ivar type: The type of the item. Always ``mcp_call``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar id: The unique ID of the tool call. Required. + :vartype id: str + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. :vartype name: str - :ivar description: A description of what the function does, used by the model to choose when - and how to call the function. - :vartype description: str - :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. - Required. - :vartype parameters: any + :ivar arguments: A JSON string of the arguments passed to the tool. Required. + :vartype arguments: str + :ivar output: + :vartype output: str + :ivar error: + :vartype error: str + :ivar status: The status of the tool call. One of ``in_progress``, ``completed``, + ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed", + "incomplete", "calling", and "failed". 
+ :vartype status: str or ~azure.ai.projects.models.MCPToolCallStatus + :ivar approval_request_id: + :vartype approval_request_id: str """ + type: Literal[InputItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the tool call. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. Required.""" name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to be called. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the function does, used by the model to choose when and how to call the - function.""" - parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The parameters the functions accepts, described as a JSON Schema object. Required.""" + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``, + ``calling``, or ``failed``. 
Known values are: \"in_progress\", \"completed\", \"incomplete\", + \"calling\", and \"failed\".""" + approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, + id: str, # pylint: disable=redefined-builtin + server_label: str, name: str, - parameters: Any, - description: Optional[str] = None, + arguments: str, + output: Optional[str] = None, + error: Optional[str] = None, + status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None, + approval_request_id: Optional[str] = None, ) -> None: ... @overload @@ -9158,29 +7707,51 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.MCP_CALL # type: ignore -class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): - """Security details for OpenApi managed_identity authentication. +class InputItemOutputMessage(InputItem, discriminator="output_message"): + """Output message. - :ivar type: The object type, which is always 'managed_identity'. Required. - :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY - :ivar security_scheme: Connection auth security details. Required. - :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme + :ivar id: The unique ID of the output message. Required. + :vartype id: str + :ivar type: The type of the output message. Always ``message``. Required. + :vartype type: str or ~azure.ai.projects.models.OUTPUT_MESSAGE + :ivar role: The role of the output message. Always ``assistant``. Required. Default value is + "assistant". + :vartype role: str + :ivar content: The content of the output message. Required. + :vartype content: list[~azure.ai.projects.models.OutputMessageContent] + :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. 
Populated when input items are returned via API. Required. Is one of the + following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'managed_identity'. Required.""" - security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the output message. Required.""" + type: Literal[InputItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output message. Always ``message``. Required.""" + role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\".""" + content: list["_models.OutputMessageContent"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Connection auth security details. Required.""" + """The content of the output message. Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when input items are returned via API. Required. Is one of the + following types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - security_scheme: "_models.OpenApiManagedSecurityScheme", + id: str, # pylint: disable=redefined-builtin + content: list["_models.OutputMessageContent"], + status: Literal["in_progress", "completed", "incomplete"], ) -> None: ... 
@overload @@ -9192,24 +7763,56 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore + self.type = InputItemType.OUTPUT_MESSAGE # type: ignore + self.role: Literal["assistant"] = "assistant" -class OpenApiManagedSecurityScheme(_Model): - """Security scheme for OpenApi managed_identity authentication. +class InputItemReasoningItem(InputItem, discriminator="reasoning"): + """Reasoning. - :ivar audience: Authentication scope for managed_identity auth type. Required. - :vartype audience: str + :ivar type: The type of the object. Always ``reasoning``. Required. + :vartype type: str or ~azure.ai.projects.models.REASONING + :ivar id: The unique identifier of the reasoning content. Required. + :vartype id: str + :ivar encrypted_content: + :vartype encrypted_content: str + :ivar summary: Reasoning summary content. Required. + :vartype summary: list[~azure.ai.projects.models.Summary] + :ivar content: Reasoning text content. + :vartype content: list[~azure.ai.projects.models.ReasoningTextContent] + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Authentication scope for managed_identity auth type. Required.""" + type: Literal[InputItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the object. Always ``reasoning``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the reasoning content. 
Required.""" + encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + summary: list["_models.Summary"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Reasoning summary content. Required.""" + content: Optional[list["_models.ReasoningTextContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Reasoning text content.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - audience: str, + id: str, # pylint: disable=redefined-builtin + summary: list["_models.Summary"], + encrypted_content: Optional[str] = None, + content: Optional[list["_models.ReasoningTextContent"]] = None, + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -9221,29 +7824,52 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.REASONING # type: ignore -class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"): - """Security details for OpenApi project connection authentication. +class InputItemWebSearchToolCall(InputItem, discriminator="web_search_call"): + """Web search tool call. - :ivar type: The object type, which is always 'project_connection'. Required. - :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION - :ivar security_scheme: Project connection auth security details. Required. 
- :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme + :ivar id: The unique ID of the web search tool call. Required. + :vartype id: str + :ivar type: The type of the web search tool call. Always ``web_search_call``. Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar status: The status of the web search tool call. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] + :vartype status: str or str or str or str + :ivar action: An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. Is one of + the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind + :vartype action: ~azure.ai.projects.models.WebSearchActionSearch or + ~azure.ai.projects.models.WebSearchActionOpenPage or + ~azure.ai.projects.models.WebSearchActionFind """ - type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'project_connection'. Required.""" - security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field( + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the web search tool call. Required.""" + type: Literal[InputItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the web search tool call. Always ``web_search_call``. Required.""" + status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Project connection auth security details. Required.""" + """The status of the web search tool call. Required. 
Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" + action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = ( + rest_field(visibility=["read", "create", "update", "delete", "query"]) + ) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. Is one of + the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind""" @overload def __init__( self, *, - security_scheme: "_models.OpenApiProjectConnectionSecurityScheme", + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "failed"], + action: Union[ + "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind" + ], ) -> None: ... @overload @@ -9255,24 +7881,57 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = OpenApiAuthType.PROJECT_CONNECTION # type: ignore + self.type = InputItemType.WEB_SEARCH_CALL # type: ignore -class OpenApiProjectConnectionSecurityScheme(_Model): - """Security scheme for OpenApi managed_identity authentication. +class ItemResource(_Model): + """Content item used to generate a response. - :ivar project_connection_id: Project connection id for Project Connection auth type. Required. - :vartype project_connection_id: str + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + ItemResourceApplyPatchToolCall, ItemResourceApplyPatchToolCallOutput, + ItemResourceCodeInterpreterToolCall, ItemResourceComputerToolCall, + ItemResourceComputerToolCallOutputResource, ItemResourceFileSearchToolCall, + ItemResourceFunctionToolCallResource, ItemResourceFunctionToolCallOutputResource, + ItemResourceImageGenToolCall, ItemResourceLocalShellToolCall, + ItemResourceLocalShellToolCallOutput, ItemResourceMcpApprovalRequest, + ItemResourceMcpApprovalResponseResource, ItemResourceMcpToolCall, ItemResourceMcpListTools, + MemorySearchToolCallItemResource, InputMessageResource, OAuthConsentRequestItemResource, + ItemResourceOutputMessage, ItemResourceFunctionShellCall, ItemResourceFunctionShellCallOutput, + StructuredOutputsItemResource, ItemResourceWebSearchToolCall, WorkflowActionOutputItemResource + + :ivar type: Required. Known values are: "message", "output_message", "file_search_call", + "computer_call", "computer_call_output", "web_search_call", "function_call", + "function_call_output", "image_generation_call", "code_interpreter_call", "local_shell_call", + "local_shell_call_output", "shell_call", "shell_call_output", "apply_patch_call", + "apply_patch_call_output", "mcp_list_tools", "mcp_approval_request", "mcp_approval_response", + "mcp_call", "structured_outputs", "workflow_action", "memory_search_call", and + "oauth_consent_request". + :vartype type: str or ~azure.ai.projects.models.ItemResourceType + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str """ - project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Project connection id for Project Connection auth type. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"message\", \"output_message\", \"file_search_call\", + \"computer_call\", \"computer_call_output\", \"web_search_call\", \"function_call\", + \"function_call_output\", \"image_generation_call\", \"code_interpreter_call\", + \"local_shell_call\", \"local_shell_call_output\", \"shell_call\", \"shell_call_output\", + \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", + \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + created_by: Optional[Union["_models.CreatedBy", str]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The information about the creator of the item. Is either a CreatedBy type or a str type.""" @overload def __init__( self, *, - project_connection_id: str, + type: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... @overload @@ -9286,39 +7945,55 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class PendingUploadRequest(_Model): - """Represents a request for a pending upload. +class InputMessageResource(ItemResource, discriminator="message"): + """InputMessageResource. - :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. - :vartype pending_upload_id: str - :ivar connection_name: Azure Storage Account connection name to use for generating temporary - SAS token. - :vartype connection_name: str - :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference - is the only supported type. - :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the message input. Always set to ``message``. 
Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message input. One of ``user``, ``system``, or ``developer``. + Required. Is one of the following types: Literal["user"], Literal["system"], + Literal["developer"] + :vartype role: str or str or str + :ivar status: The status of item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar content: Required. + :vartype content: list[~azure.ai.projects.models.InputContent] + :ivar id: The unique ID of the message input. Required. + :vartype id: str """ - pending_upload_id: Optional[str] = rest_field( - name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] - ) - """If PendingUploadId is not provided, a random GUID will be used.""" - connection_name: Optional[str] = rest_field( - name="connectionName", visibility=["read", "create", "update", "delete", "query"] + type: Literal[ItemResourceType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the message input. Always set to ``message``. Required.""" + role: Literal["user", "system", "developer"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """Azure Storage Account connection name to use for generating temporary SAS token.""" - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( - name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + """The role of the message input. One of ``user``, ``system``, or ``developer``. Required. 
Is one + of the following types: Literal[\"user\"], Literal[\"system\"], Literal[\"developer\"]""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + """The status of item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + content: list["_models.InputContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the message input. Required.""" @overload def __init__( self, *, - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], - pending_upload_id: Optional[str] = None, - connection_name: Optional[str] = None, + role: Literal["user", "system", "developer"], + content: list["_models.InputContent"], + id: str, # pylint: disable=redefined-builtin + created_by: Optional[Union["_models.CreatedBy", str]] = None, + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -9330,46 +8005,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.MESSAGE # type: ignore -class PendingUploadResponse(_Model): - """Represents the response for a pending upload request. +class InputTextContentParam(_Model): + """Input text. - :ivar blob_reference: Container-level read, write, list SAS. Required. - :vartype blob_reference: ~azure.ai.projects.models.BlobReference - :ivar pending_upload_id: ID for this upload request. Required. 
- :vartype pending_upload_id: str - :ivar version: Version of asset to be created if user did not specify version when initially - creating upload. - :vartype version: str - :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference - is the only supported type. - :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + :ivar type: The type of the input item. Always ``input_text``. Required. Default value is + "input_text". + :vartype type: str + :ivar text: The text input to the model. Required. + :vartype text: str """ - blob_reference: "_models.BlobReference" = rest_field( - name="blobReference", visibility=["read", "create", "update", "delete", "query"] - ) - """Container-level read, write, list SAS. Required.""" - pending_upload_id: str = rest_field( - name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] - ) - """ID for this upload request. Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Version of asset to be created if user did not specify version when initially creating upload.""" - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( - name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] - ) - """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + type: Literal["input_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the input item. Always ``input_text``. Required. Default value is \"input_text\".""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text input to the model. Required.""" @overload def __init__( self, *, - blob_reference: "_models.BlobReference", - pending_upload_id: str, - pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], - version: Optional[str] = None, + text: str, ) -> None: ... 
@overload @@ -9381,36 +8039,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type: Literal["input_text"] = "input_text" -class Prompt(_Model): - """Reference to a prompt template and its variables. - `Learn more - `_. +class Insight(_Model): + """The response body for cluster insights. - :ivar id: The unique identifier of the prompt template to use. Required. + :ivar id: The unique identifier for the insights report. Required. :vartype id: str - :ivar version: Optional version of the prompt template. - :vartype version: str - :ivar variables: - :vartype variables: ~azure.ai.projects.models.ResponsePromptVariables + :ivar metadata: Metadata about the insights report. Required. + :vartype metadata: ~azure.ai.projects.models.InsightsMetadata + :ivar state: The current state of the insights. Required. Known values are: "NotStarted", + "Running", "Succeeded", "Failed", and "Canceled". + :vartype state: str or ~azure.ai.projects.models.OperationState + :ivar display_name: User friendly display name for the insight. Required. + :vartype display_name: str + :ivar request: Request for the insights analysis. Required. + :vartype request: ~azure.ai.projects.models.InsightRequest + :ivar result: The result of the insights report. + :vartype result: ~azure.ai.projects.models.InsightResult """ - id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the prompt template to use. Required.""" - version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Optional version of the prompt template.""" - variables: Optional["_models.ResponsePromptVariables"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) + id: str = rest_field(visibility=["read"]) + """The unique identifier for the insights report. 
Required.""" + metadata: "_models.InsightsMetadata" = rest_field(visibility=["read"]) + """Metadata about the insights report. Required.""" + state: Union[str, "_models.OperationState"] = rest_field(visibility=["read"]) + """The current state of the insights. Required. Known values are: \"NotStarted\", \"Running\", + \"Succeeded\", \"Failed\", and \"Canceled\".""" + display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"]) + """User friendly display name for the insight. Required.""" + request: "_models.InsightRequest" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Request for the insights analysis. Required.""" + result: Optional["_models.InsightResult"] = rest_field(visibility=["read"]) + """The result of the insights report.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - version: Optional[str] = None, - variables: Optional["_models.ResponsePromptVariables"] = None, + display_name: str, + request: "_models.InsightRequest", ) -> None: ... @overload @@ -9424,87 +8093,64 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): - """The prompt agent definition. - - :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. - :vartype rai_config: ~azure.ai.projects.models.RaiConfig - :ivar kind: Required. - :vartype kind: str or ~azure.ai.projects.models.PROMPT - :ivar model: The model deployment to use for this agent. Required. - :vartype model: str - :ivar instructions: A system (or developer) message inserted into the model's context. - :vartype instructions: str - :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 - will make the output more random, while lower values like 0.2 will make it more focused and - deterministic. 
- We generally recommend altering this or ``top_p`` but not both. - :vartype temperature: float - :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. +class InsightCluster(_Model): + """A cluster of analysis samples. - We generally recommend altering this or ``temperature`` but not both. - :vartype top_p: float - :ivar reasoning: - :vartype reasoning: ~azure.ai.projects.models.Reasoning - :ivar tools: An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar text: Configuration options for a text response from the model. Can be plain text or - structured JSON data. - :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText - :ivar structured_inputs: Set of structured inputs that can participate in prompt template - substitution or tool argument bindings. - :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] + :ivar id: The id of the analysis cluster. Required. + :vartype id: str + :ivar label: Label for the cluster. Required. + :vartype label: str + :ivar suggestion: Suggestion for the cluster. Required. + :vartype suggestion: str + :ivar suggestion_title: The title of the suggestion for the cluster. Required. + :vartype suggestion_title: str + :ivar description: Description of the analysis cluster. Required. + :vartype description: str + :ivar weight: The weight of the analysis cluster. This indicate number of samples in the + cluster. Required. + :vartype weight: int + :ivar sub_clusters: List of subclusters within this cluster. Empty if no subclusters exist. 
+ :vartype sub_clusters: list[~azure.ai.projects.models.InsightCluster] + :ivar samples: List of samples that belong to this cluster. Empty if samples are part of + subclusters. + :vartype samples: list[~azure.ai.projects.models.InsightSample] """ - kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The model deployment to use for this agent. Required.""" - instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A system (or developer) message inserted into the model's context.""" - temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output - more random, while lower values like 0.2 will make it more focused and deterministic. - We generally recommend altering this or ``top_p`` but not both.""" - top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. - - We generally recommend altering this or ``temperature`` but not both.""" - reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the model may call while generating a response. 
You - can specify which tool to use by setting the ``tool_choice`` parameter.""" - text: Optional["_models.PromptAgentDefinitionText"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The id of the analysis cluster. Required.""" + label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Label for the cluster. Required.""" + suggestion: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Suggestion for the cluster. Required.""" + suggestion_title: str = rest_field( + name="suggestionTitle", visibility=["read", "create", "update", "delete", "query"] ) - """Configuration options for a text response from the model. Can be plain text or structured JSON - data.""" - structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = rest_field( + """The title of the suggestion for the cluster. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the analysis cluster. Required.""" + weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The weight of the analysis cluster. This indicate number of samples in the cluster. Required.""" + sub_clusters: Optional[list["_models.InsightCluster"]] = rest_field( + name="subClusters", visibility=["read", "create", "update", "delete", "query"] + ) + """List of subclusters within this cluster. Empty if no subclusters exist.""" + samples: Optional[list["_models.InsightSample"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Set of structured inputs that can participate in prompt template substitution or tool argument - bindings.""" + """List of samples that belong to this cluster. 
Empty if samples are part of subclusters.""" @overload def __init__( self, *, - model: str, - rai_config: Optional["_models.RaiConfig"] = None, - instructions: Optional[str] = None, - temperature: Optional[float] = None, - top_p: Optional[float] = None, - reasoning: Optional["_models.Reasoning"] = None, - tools: Optional[list["_models.Tool"]] = None, - text: Optional["_models.PromptAgentDefinitionText"] = None, - structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, + id: str, # pylint: disable=redefined-builtin + label: str, + suggestion: str, + suggestion_title: str, + description: str, + weight: int, + sub_clusters: Optional[list["_models.InsightCluster"]] = None, + samples: Optional[list["_models.InsightSample"]] = None, ) -> None: ... @overload @@ -9516,25 +8162,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.kind = AgentKind.PROMPT # type: ignore -class PromptAgentDefinitionText(_Model): - """PromptAgentDefinitionText. +class InsightModelConfiguration(_Model): + """Configuration of the model used in the insight generation. - :ivar format: - :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + :ivar model_deployment_name: The model deployment to be evaluated. Accepts either the + deployment name alone or with the connection name as '{connectionName}/'. + Required. + :vartype model_deployment_name: str """ - format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] ) + """The model deployment to be evaluated. Accepts either the deployment name alone or with the + connection name as '{connectionName}/'. 
Required.""" @overload def __init__( self, *, - format: Optional["_models.ResponseTextFormatConfiguration"] = None, + model_deployment_name: str, ) -> None: ... @overload @@ -9548,36 +8197,28 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt"): - """Prompt-based evaluator. +class InsightScheduleTask(ScheduleTask, discriminator="Insight"): + """Insight task for the schedule. - :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. - This includes parameters like type, properties, required. - :vartype init_parameters: any - :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This - includes parameters like type, properties, required. - :vartype data_schema: any - :ivar metrics: List of output metrics produced by this evaluator. - :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] - :ivar type: Required. Prompt-based definition - :vartype type: str or ~azure.ai.projects.models.PROMPT - :ivar prompt_text: The prompt text used for evaluation. Required. - :vartype prompt_text: str + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + :ivar type: Required. Insight task. + :vartype type: str or ~azure.ai.projects.models.INSIGHT + :ivar insight: The insight payload. Required. + :vartype insight: ~azure.ai.projects.models.Insight """ - type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required. Prompt-based definition""" - prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The prompt text used for evaluation. 
Required.""" + type: Literal[ScheduleTaskType.INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Insight task.""" + insight: "_models.Insight" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The insight payload. Required.""" @overload def __init__( self, *, - prompt_text: str, - init_parameters: Optional[Any] = None, - data_schema: Optional[Any] = None, - metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + insight: "_models.Insight", + configuration: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -9589,32 +8230,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = EvaluatorDefinitionType.PROMPT # type: ignore + self.type = ScheduleTaskType.INSIGHT # type: ignore -class ProtocolVersionRecord(_Model): - """A record mapping for a single protocol and its version. +class InsightsMetadata(_Model): + """Metadata about the insights. - :ivar protocol: The protocol type. Required. Known values are: "activity_protocol" and - "responses". - :vartype protocol: str or ~azure.ai.projects.models.AgentProtocol - :ivar version: The version string for the protocol, e.g. 'v0.1.1'. Required. - :vartype version: str + :ivar created_at: The timestamp when the insights were created. Required. + :vartype created_at: ~datetime.datetime + :ivar completed_at: The timestamp when the insights were completed. + :vartype completed_at: ~datetime.datetime """ - protocol: Union[str, "_models.AgentProtocol"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + created_at: datetime.datetime = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" ) - """The protocol type. Required. 
Known values are: \"activity_protocol\" and \"responses\".""" - version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The version string for the protocol, e.g. 'v0.1.1'. Required.""" + """The timestamp when the insights were created. Required.""" + completed_at: Optional[datetime.datetime] = rest_field( + name="completedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp when the insights were completed.""" @overload def __init__( self, *, - protocol: Union[str, "_models.AgentProtocol"], - version: str, + created_at: datetime.datetime, + completed_at: Optional[datetime.datetime] = None, ) -> None: ... @overload @@ -9628,21 +8270,45 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class RaiConfig(_Model): - """Configuration for Responsible AI (RAI) content filtering and safety features. +class InsightSummary(_Model): + """Summary of the error cluster analysis. - :ivar rai_policy_name: The name of the RAI policy to apply. Required. - :vartype rai_policy_name: str + :ivar sample_count: Total number of samples analyzed. Required. + :vartype sample_count: int + :ivar unique_subcluster_count: Total number of unique subcluster labels. Required. + :vartype unique_subcluster_count: int + :ivar unique_cluster_count: Total number of unique clusters. Required. + :vartype unique_cluster_count: int + :ivar method: Method used for clustering. Required. + :vartype method: str + :ivar usage: Token usage while performing clustering analysis. Required. + :vartype usage: ~azure.ai.projects.models.ClusterTokenUsage """ - rai_policy_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the RAI policy to apply. Required.""" + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Total number of samples analyzed. 
Required.""" + unique_subcluster_count: int = rest_field( + name="uniqueSubclusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique subcluster labels. Required.""" + unique_cluster_count: int = rest_field( + name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique clusters. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Method used for clustering. Required.""" + usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Token usage while performing clustering analysis. Required.""" @overload def __init__( self, *, - rai_policy_name: str, + sample_count: int, + unique_subcluster_count: int, + unique_cluster_count: int, + method: str, + usage: "_models.ClusterTokenUsage", ) -> None: ... @overload @@ -9656,33 +8322,25 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class RankingOptions(_Model): - """RankingOptions. +class ItemReferenceParam(InputItem, discriminator="item_reference"): + """Item reference. - :ivar ranker: The ranker to use for the file search. Is either a Literal["auto"] type or a - Literal["default-2024-11-15"] type. - :vartype ranker: str or str - :ivar score_threshold: The score threshold for the file search, a number between 0 and 1. - Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer - results. - :vartype score_threshold: float + :ivar type: The type of item to reference. Always ``item_reference``. Required. + :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE + :ivar id: The ID of the item to reference. Required. + :vartype id: str """ - ranker: Optional[Literal["auto", "default-2024-11-15"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The ranker to use for the file search. 
Is either a Literal[\"auto\"] type or a - Literal[\"default-2024-11-15\"] type.""" - score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will - attempt to return only the most relevant results, but may return fewer results.""" + type: Literal[InputItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of item to reference. Always ``item_reference``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item to reference. Required.""" @overload def __init__( self, *, - ranker: Optional[Literal["auto", "default-2024-11-15"]] = None, - score_threshold: Optional[float] = None, + id: str, # pylint: disable=redefined-builtin ) -> None: ... @overload @@ -9694,54 +8352,60 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = InputItemType.ITEM_REFERENCE # type: ignore -class Reasoning(_Model): - """**o-series models only** - - Configuration options for `reasoning models - `_. +class ItemResourceApplyPatchToolCall(ItemResource, discriminator="apply_patch_call"): + """Apply patch tool call. - :ivar effort: Known values are: "none", "minimal", "low", "medium", and "high". - :vartype effort: str or ~azure.ai.projects.models.ReasoningEffort - :ivar summary: A summary of the reasoning performed by the model. This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], - Literal["concise"], Literal["detailed"] - :vartype summary: str or str or str - :ivar generate_summary: **Deprecated**: use ``summary`` instead. 
A summary of the reasoning - performed by the model. This can be useful for debugging and understanding the model's - reasoning process. One of ``auto``, ``concise``, or ``detailed``. Is one of the following - types: Literal["auto"], Literal["concise"], Literal["detailed"] - :vartype generate_summary: str or str or str + :ivar type: The type of the item. Always ``apply_patch_call``. Required. + :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL + :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via + API. Required. + :vartype id: str + :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. + :vartype call_id: str + :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``. + Required. Known values are: "in_progress" and "completed". + :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallStatus + :ivar operation: One of the create_file, delete_file, or update_file operations applied via + apply_patch. Required. + :vartype operation: ~azure.ai.projects.models.ApplyPatchFileOperation + :ivar created_by: The ID of the entity that created this tool call. + :vartype created_by: str """ - effort: Optional[Union[str, "_models.ReasoningEffort"]] = rest_field( + type: Literal[ItemResourceType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``apply_patch_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call. Populated when this item is returned via API. + Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call generated by the model. 
Required.""" + status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Known values are: \"none\", \"minimal\", \"low\", \"medium\", and \"high\".""" - summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required. + Known values are: \"in_progress\" and \"completed\".""" + operation: "_models.ApplyPatchFileOperation" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """A summary of the reasoning performed by the model. This can be - useful for debugging and understanding the model's reasoning process. - One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: - Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" - generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + """One of the create_file, delete_file, or update_file operations applied via apply_patch. + Required.""" + created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] visibility=["read", "create", "update", "delete", "query"] ) - """**Deprecated**: use ``summary`` instead. A summary of the reasoning performed by the model. - This can be useful for debugging and understanding the model's reasoning process. One of - ``auto``, ``concise``, or ``detailed``. 
Is one of the following types: Literal[\"auto\"], - Literal[\"concise\"], Literal[\"detailed\"]""" + """The ID of the entity that created this tool call.""" @overload def __init__( self, *, - effort: Optional[Union[str, "_models.ReasoningEffort"]] = None, - summary: Optional[Literal["auto", "concise", "detailed"]] = None, - generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None, + id: str, # pylint: disable=redefined-builtin + call_id: str, + status: Union[str, "_models.ApplyPatchCallStatus"], + operation: "_models.ApplyPatchFileOperation", + created_by: Optional[str] = None, ) -> None: ... @overload @@ -9753,40 +8417,55 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.APPLY_PATCH_CALL # type: ignore -class ReasoningItemParam(ItemParam, discriminator="reasoning"): - """A description of the chain of thought used by a reasoning model while generating - a response. Be sure to include these items in your ``input`` to the Responses API - for subsequent turns of a conversation if you are manually - `managing conversation state `_. +class ItemResourceApplyPatchToolCallOutput(ItemResource, discriminator="apply_patch_call_output"): + """Apply patch tool call output. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.REASONING - :ivar encrypted_content: The encrypted content of the reasoning item - populated when a - response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter. - :vartype encrypted_content: str - :ivar summary: Reasoning text contents. Required. - :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + :ivar type: The type of the item. Always ``apply_patch_call_output``. Required. + :vartype type: str or ~azure.ai.projects.models.APPLY_PATCH_CALL_OUTPUT + :ivar id: The unique ID of the apply patch tool call output. 
Populated when this item is + returned via API. Required. + :vartype id: str + :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required. + :vartype call_id: str + :ivar status: The status of the apply patch tool call output. One of ``completed`` or + ``failed``. Required. Known values are: "completed" and "failed". + :vartype status: str or ~azure.ai.projects.models.ApplyPatchCallOutputStatus + :ivar output: + :vartype output: str + :ivar created_by: The ID of the entity that created this tool call output. + :vartype created_by: str """ - type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The encrypted content of the reasoning item - populated when a response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" - summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + type: Literal[ItemResourceType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``apply_patch_call_output``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call output. Populated when this item is returned via + API. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the apply patch tool call generated by the model. Required.""" + status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required. 
+ Known values are: \"completed\" and \"failed\".""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] visibility=["read", "create", "update", "delete", "query"] ) - """Reasoning text contents. Required.""" + """The ID of the entity that created this tool call output.""" @overload def __init__( self, *, - summary: list["_models.ReasoningItemSummaryPart"], - encrypted_content: Optional[str] = None, + id: str, # pylint: disable=redefined-builtin + call_id: str, + status: Union[str, "_models.ApplyPatchCallOutputStatus"], + output: Optional[str] = None, + created_by: Optional[str] = None, ) -> None: ... @overload @@ -9798,47 +8477,64 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.REASONING # type: ignore + self.type = ItemResourceType.APPLY_PATCH_CALL_OUTPUT # type: ignore -class ReasoningItemResource(ItemResource, discriminator="reasoning"): - """A description of the chain of thought used by a reasoning model while generating - a response. Be sure to include these items in your ``input`` to the Responses API - for subsequent turns of a conversation if you are manually - `managing conversation state `_. +class ItemResourceCodeInterpreterToolCall(ItemResource, discriminator="code_interpreter_call"): + """Code interpreter tool call. - :ivar id: Required. + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``. + Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar id: The unique ID of the code interpreter tool call. Required. 
:vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.REASONING - :ivar encrypted_content: The encrypted content of the reasoning item - populated when a - response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter. - :vartype encrypted_content: str - :ivar summary: Reasoning text contents. Required. - :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``, + ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the + following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"], + Literal["interpreting"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: Required. + :vartype code: str + :ivar outputs: Required. + :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutputLogs or + ~azure.ai.projects.models.CodeInterpreterOutputImage] """ - type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + type: Literal[ItemResourceType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the code interpreter tool call. 
Required.""" + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``, + ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"], + Literal[\"interpreting\"], Literal[\"failed\"]""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" - encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The encrypted content of the reasoning item - populated when a response is - generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" - summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """Reasoning text contents. Required.""" + """Required.""" @overload def __init__( self, *, id: str, # pylint: disable=redefined-builtin - summary: list["_models.ReasoningItemSummaryPart"], - created_by: Optional["_models.CreatedBy"] = None, - encrypted_content: Optional[str] = None, + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], + container_id: str, + code: str, + outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]], + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... 
@overload @@ -9850,28 +8546,60 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.REASONING # type: ignore - + self.type = ItemResourceType.CODE_INTERPRETER_CALL # type: ignore -class ReasoningItemSummaryPart(_Model): - """ReasoningItemSummaryPart. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ReasoningItemSummaryTextPart +class ItemResourceComputerToolCall(ItemResource, discriminator="computer_call"): + """Computer tool call. - :ivar type: Required. "summary_text" - :vartype type: str or ~azure.ai.projects.models.ReasoningItemSummaryPartType + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the computer call. Always ``computer_call``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar id: The unique ID of the computer call. Required. + :vartype id: str + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. + :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
\"summary_text\"""" + type: Literal[ItemResourceType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer call. Always ``computer_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the computer call. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The pending safety checks for the computer call. Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - type: str, + id: str, # pylint: disable=redefined-builtin + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"], + status: Literal["in_progress", "completed", "incomplete"], + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... 
@overload @@ -9883,27 +8611,66 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.COMPUTER_CALL # type: ignore -class ReasoningItemSummaryTextPart(ReasoningItemSummaryPart, discriminator="summary_text"): - """ReasoningItemSummaryTextPart. +class ItemResourceComputerToolCallOutputResource( + ItemResource, discriminator="computer_call_output" +): # pylint: disable=name-too-long + """ItemResourceComputerToolCallOutputResource. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.SUMMARY_TEXT - :ivar text: Required. - :vartype text: str + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the computer tool call output. Always ``computer_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar id: The ID of the computer tool call output. + :vartype id: str + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar acknowledged_safety_checks: The safety checks reported by the API that have been + acknowledged by the developer. + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerCallSafetyCheckParam] + :ivar output: Required. + :vartype output: ~azure.ai.projects.models.ComputerScreenshotImage + :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when input items are returned via API. 
Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[ReasoningItemSummaryPartType.SUMMARY_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + type: Literal[ItemResourceType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer tool call output. Always ``computer_call_output``. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call output.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The safety checks reported by the API that have been acknowledged by the + developer.""" + output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when input items are returned via API. 
Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - text: str, + call_id: str, + output: "_models.ComputerScreenshotImage", + created_by: Optional[Union["_models.CreatedBy", str]] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin + acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None, + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -9915,48 +8682,56 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ReasoningItemSummaryPartType.SUMMARY_TEXT # type: ignore + self.type = ItemResourceType.COMPUTER_CALL_OUTPUT # type: ignore -class RecurrenceTrigger(Trigger, discriminator="Recurrence"): - """Recurrence based trigger. +class ItemResourceFileSearchToolCall(ItemResource, discriminator="file_search_call"): + """File search tool call. - :ivar type: Type of the trigger. Required. Recurrence based trigger. - :vartype type: str or ~azure.ai.projects.models.RECURRENCE - :ivar start_time: Start time for the recurrence schedule in ISO 8601 format. - :vartype start_time: str - :ivar end_time: End time for the recurrence schedule in ISO 8601 format. - :vartype end_time: str - :ivar time_zone: Time zone for the recurrence schedule. - :vartype time_zone: str - :ivar interval: Interval for the recurrence schedule. Required. - :vartype interval: int - :ivar schedule: Recurrence schedule for the recurrence trigger. Required. - :vartype schedule: ~azure.ai.projects.models.RecurrenceSchedule + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: The unique ID of the file search tool call. Required. 
+ :vartype id: str + :ivar type: The type of the file search tool call. Always ``file_search_call``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar status: The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], + Literal["failed"] + :vartype status: str or str or str or str or str + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallResults] """ - type: Literal[TriggerType.RECURRENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Type of the trigger. Required. Recurrence based trigger.""" - start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) - """Start time for the recurrence schedule in ISO 8601 format.""" - end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) - """End time for the recurrence schedule in ISO 8601 format.""" - time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) - """Time zone for the recurrence schedule.""" - interval: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Interval for the recurrence schedule. Required.""" - schedule: "_models.RecurrenceSchedule" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Recurrence schedule for the recurrence trigger. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the file search tool call. 
Required.""" + type: Literal[ItemResourceType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file search tool call. Always ``file_search_call``. Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) @overload def __init__( self, *, - interval: int, - schedule: "_models.RecurrenceSchedule", - start_time: Optional[str] = None, - end_time: Optional[str] = None, - time_zone: Optional[str] = None, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + queries: list[str], + created_by: Optional[Union["_models.CreatedBy", str]] = None, + results: Optional[list["_models.FileSearchToolCallResults"]] = None, ) -> None: ... @overload @@ -9968,87 +8743,55 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = TriggerType.RECURRENCE # type: ignore + self.type = ItemResourceType.FILE_SEARCH_CALL # type: ignore -class RedTeam(_Model): - """Red team details. +class ItemResourceFunctionShellCall(ItemResource, discriminator="shell_call"): + """Shell tool call. - :ivar name: Identifier of the red team run. Required. 
- :vartype name: str - :ivar display_name: Name of the red-team run. - :vartype display_name: str - :ivar num_turns: Number of simulation rounds. - :vartype num_turns: int - :ivar attack_strategies: List of attack strategies or nested lists of attack strategies. - :vartype attack_strategies: list[str or ~azure.ai.projects.models.AttackStrategy] - :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the - scan outputs conversation not evaluation result. - :vartype simulation_only: bool - :ivar risk_categories: List of risk categories to generate attack objectives for. - :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] - :ivar application_scenario: Application scenario for the red team operation, to generate - scenario specific attacks. - :vartype application_scenario: str - :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. - :vartype tags: dict[str, str] - :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a - property cannot be removed. - :vartype properties: dict[str, str] - :ivar status: Status of the red-team. It is set by service and is read-only. - :vartype status: str - :ivar target: Target configuration for the red-team run. Required. - :vartype target: ~azure.ai.projects.models.TargetConfig + :ivar type: The type of the item. Always ``shell_call``. Required. + :vartype type: str or ~azure.ai.projects.models.SHELL_CALL + :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API. + Required. + :vartype id: str + :ivar call_id: The unique ID of the shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: The shell commands and limits that describe how to run the tool call. Required. + :vartype action: ~azure.ai.projects.models.FunctionShellAction + :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or + ``incomplete``. Required. 
Known values are: "in_progress", "completed", and "incomplete". + :vartype status: str or ~azure.ai.projects.models.LocalShellCallStatus + :ivar created_by: The ID of the entity that created this tool call. + :vartype created_by: str """ - name: str = rest_field(name="id", visibility=["read"]) - """Identifier of the red team run. Required.""" - display_name: Optional[str] = rest_field( - name="displayName", visibility=["read", "create", "update", "delete", "query"] - ) - """Name of the red-team run.""" - num_turns: Optional[int] = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) - """Number of simulation rounds.""" - attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = rest_field( - name="attackStrategies", visibility=["read", "create", "update", "delete", "query"] - ) - """List of attack strategies or nested lists of attack strategies.""" - simulation_only: Optional[bool] = rest_field( - name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] - ) - """Simulation-only or Simulation + Evaluation. Default false, if true the scan outputs - conversation not evaluation result.""" - risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = rest_field( - name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + type: Literal[ItemResourceType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``shell_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the shell tool call. Populated when this item is returned via API. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the shell tool call generated by the model. 
Required.""" + action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The shell commands and limits that describe how to run the tool call. Required.""" + status: Union[str, "_models.LocalShellCallStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] ) - """List of risk categories to generate attack objectives for.""" - application_scenario: Optional[str] = rest_field( - name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] + """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``. + Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\".""" + created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] + visibility=["read", "create", "update", "delete", "query"] ) - """Application scenario for the red team operation, to generate scenario specific attacks.""" - tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Red team's tags. Unlike properties, tags are fully mutable.""" - properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be - removed.""" - status: Optional[str] = rest_field(visibility=["read"]) - """Status of the red-team. It is set by service and is read-only.""" - target: "_models.TargetConfig" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Target configuration for the red-team run. 
Required.""" + """The ID of the entity that created this tool call.""" @overload def __init__( self, *, - target: "_models.TargetConfig", - display_name: Optional[str] = None, - num_turns: Optional[int] = None, - attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = None, - simulation_only: Optional[bool] = None, - risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = None, - application_scenario: Optional[str] = None, - tags: Optional[dict[str, str]] = None, - properties: Optional[dict[str, str]] = None, + id: str, # pylint: disable=redefined-builtin + call_id: str, + action: "_models.FunctionShellAction", + status: Union[str, "_models.LocalShellCallStatus"], + created_by: Optional[str] = None, ) -> None: ... @overload @@ -10060,307 +8803,53 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.SHELL_CALL # type: ignore -class Response(_Model): - """Response. +class ItemResourceFunctionShellCallOutput(ItemResource, discriminator="shell_call_output"): + """Shell call output. - :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. - Keys are strings with a maximum length of 64 characters. Values are strings - with a maximum length of 512 characters. Required. - :vartype metadata: dict[str, str] - :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 - will make the output more random, while lower values like 0.2 will make it more focused and - deterministic. - We generally recommend altering this or ``top_p`` but not both. Required. 
- :vartype temperature: float - :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. - We generally recommend altering this or ``temperature`` but not both. Required. - :vartype top_p: float - :ivar user: A unique identifier representing your end-user, which can help OpenAI to monitor - and detect abuse. `Learn more about safety best practices - `_. Required. - :vartype user: str - :ivar service_tier: Note: service_tier is not applicable to Azure OpenAI. Known values are: - "auto", "default", "flex", "scale", and "priority". - :vartype service_tier: str or ~azure.ai.projects.models.ServiceTier - :ivar top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to - return at each token position, each with an associated log probability. - :vartype top_logprobs: int - :ivar previous_response_id: The unique ID of the previous response to the model. Use this to - create multi-turn conversations. Learn more about - `managing conversation state `_. - :vartype previous_response_id: str - :ivar model: The model deployment to use for the creation of this response. - :vartype model: str - :ivar reasoning: - :vartype reasoning: ~azure.ai.projects.models.Reasoning - :ivar background: Whether to run the model response in the background. - `Learn more about background responses `_. - :vartype background: bool - :ivar max_output_tokens: An upper bound for the number of tokens that can be generated for a - response, including visible output tokens and `reasoning tokens - `_. - :vartype max_output_tokens: int - :ivar max_tool_calls: The maximum number of total calls to built-in tools that can be processed - in a response. This maximum number applies across all built-in tool calls, not per individual - tool. 
Any further attempts to call a tool by the model will be ignored. - :vartype max_tool_calls: int - :ivar text: Configuration options for a text response from the model. Can be plain - text or structured JSON data. See `Text inputs and outputs - `_ - and `Structured Outputs `_. - :vartype text: ~azure.ai.projects.models.ResponseText - :ivar tools: An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. - The two categories of tools you can provide the model are: - * **Built-in tools**: Tools that are provided by OpenAI that extend the - model's capabilities, like [web - search](https://platform.openai.com/docs/guides/tools-web-search) - or [file search](https://platform.openai.com/docs/guides/tools-file-search). Learn more about - [built-in tools](https://platform.openai.com/docs/guides/tools). - * **Function calls (custom tools)**: Functions that are defined by you, - enabling the model to call your own code. Learn more about - [function calling](https://platform.openai.com/docs/guides/function-calling). - :vartype tools: list[~azure.ai.projects.models.Tool] - :ivar tool_choice: How the model should select which tool (or tools) to use when generating - a response. See the ``tools`` parameter to see how to specify which tools - the model can call. Is either a Union[str, "_models.ToolChoiceOptions"] type or a - ToolChoiceObject type. - :vartype tool_choice: str or ~azure.ai.projects.models.ToolChoiceOptions or - ~azure.ai.projects.models.ToolChoiceObject - :ivar prompt: - :vartype prompt: ~azure.ai.projects.models.Prompt - :ivar truncation: The truncation strategy to use for the model response. - * `auto`: If the context of this response and previous ones exceeds - the model's context window size, the model will truncate the - response to fit the context window by dropping input items in the - middle of the conversation. 
- * `disabled` (default): If a model response will exceed the context window - size for a model, the request will fail with a 400 error. Is either a Literal["auto"] type or a - Literal["disabled"] type. - :vartype truncation: str or str - :ivar id: Unique identifier for this Response. Required. + :ivar type: The type of the shell call output. Always ``shell_call_output``. Required. + :vartype type: str or ~azure.ai.projects.models.SHELL_CALL_OUTPUT + :ivar id: The unique ID of the shell call output. Populated when this item is returned via API. + Required. :vartype id: str - :ivar object: The object type of this resource - always set to ``response``. Required. Default - value is "response". - :vartype object: str - :ivar status: The status of the response generation. One of ``completed``, ``failed``, - ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: - Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"], - Literal["queued"], Literal["incomplete"] - :vartype status: str or str or str or str or str or str - :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required. - :vartype created_at: ~datetime.datetime - :ivar error: Required. - :vartype error: ~azure.ai.projects.models.ResponseError - :ivar incomplete_details: Details about why the response is incomplete. Required. - :vartype incomplete_details: ~azure.ai.projects.models.ResponseIncompleteDetails1 - :ivar output: An array of content items generated by the model. - * The length and order of items in the `output` array is dependent - on the model's response. - * Rather than accessing the first item in the `output` array and - assuming it's an `assistant` message with the content generated by - the model, you might consider using the `output_text` property where - supported in SDKs. Required. 
- :vartype output: list[~azure.ai.projects.models.ItemResource] - :ivar instructions: A system (or developer) message inserted into the model's context. - When using along with ``previous_response_id``, the instructions from a previous - response will not be carried over to the next response. This makes it simple - to swap out system (or developer) messages in new responses. Required. Is either a str type or - a [ItemParam] type. - :vartype instructions: str or list[~azure.ai.projects.models.ItemParam] - :ivar output_text: SDK-only convenience property that contains the aggregated text output - from all ``output_text`` items in the ``output`` array, if any are present. - Supported in the Python and JavaScript SDKs. - :vartype output_text: str - :ivar usage: - :vartype usage: ~azure.ai.projects.models.ResponseUsage - :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required. - :vartype parallel_tool_calls: bool - :ivar conversation: Required. - :vartype conversation: ~azure.ai.projects.models.ResponseConversation1 - :ivar agent: The agent used for this response. - :vartype agent: ~azure.ai.projects.models.AgentId - :ivar structured_inputs: The structured inputs to the response that can participate in prompt - template substitution or tool argument bindings. - :vartype structured_inputs: dict[str, any] + :ivar call_id: The unique ID of the shell tool call generated by the model. Required. + :vartype call_id: str + :ivar output: An array of shell call output contents. Required. + :vartype output: list[~azure.ai.projects.models.FunctionShellCallOutputContent] + :ivar max_output_length: Required. + :vartype max_output_length: int + :ivar created_by: The identifier of the actor that created the item. + :vartype created_by: str """ - metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Set of 16 key-value pairs that can be attached to an object. 
This can be - useful for storing additional information about the object in a structured - format, and querying for objects via API or the dashboard. - Keys are strings with a maximum length of 64 characters. Values are strings - with a maximum length of 512 characters. Required.""" - temperature: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output - more random, while lower values like 0.2 will make it more focused and deterministic. - We generally recommend altering this or ``top_p`` but not both. Required.""" - top_p: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An alternative to sampling with temperature, called nucleus sampling, - where the model considers the results of the tokens with top_p probability - mass. So 0.1 means only the tokens comprising the top 10% probability mass - are considered. We generally recommend altering this or ``temperature`` but not both. Required.""" - user: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A unique identifier representing your end-user, which can help OpenAI to monitor and detect - abuse. `Learn more about safety best practices - `_. Required.""" - service_tier: Optional[Union[str, "_models.ServiceTier"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Note: service_tier is not applicable to Azure OpenAI. 
Known values are: \"auto\", \"default\", - \"flex\", \"scale\", and \"priority\".""" - top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An integer between 0 and 20 specifying the number of most likely tokens to return at each token - position, each with an associated log probability.""" - previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique ID of the previous response to the model. Use this to - create multi-turn conversations. Learn more about - `managing conversation state `_.""" - model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The model deployment to use for the creation of this response.""" - reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to run the model response in the background. - `Learn more about background responses `_.""" - max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An upper bound for the number of tokens that can be generated for a response, including visible - output tokens and `reasoning tokens `_.""" - max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The maximum number of total calls to built-in tools that can be processed in a response. This - maximum number applies across all built-in tool calls, not per individual tool. Any further - attempts to call a tool by the model will be ignored.""" - text: Optional["_models.ResponseText"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Configuration options for a text response from the model. Can be plain - text or structured JSON data. 
See `Text inputs and outputs - `_ - and `Structured Outputs `_.""" - tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of tools the model may call while generating a response. You - can specify which tool to use by setting the ``tool_choice`` parameter. - The two categories of tools you can provide the model are: - * **Built-in tools**: Tools that are provided by OpenAI that extend the - model's capabilities, like [web - search](https://platform.openai.com/docs/guides/tools-web-search) - or [file search](https://platform.openai.com/docs/guides/tools-file-search). Learn more about - [built-in tools](https://platform.openai.com/docs/guides/tools). - * **Function calls (custom tools)**: Functions that are defined by you, - enabling the model to call your own code. Learn more about - [function calling](https://platform.openai.com/docs/guides/function-calling).""" - tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """How the model should select which tool (or tools) to use when generating - a response. See the ``tools`` parameter to see how to specify which tools - the model can call. Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a - ToolChoiceObject type.""" - prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - truncation: Optional[Literal["auto", "disabled"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The truncation strategy to use for the model response. - * `auto`: If the context of this response and previous ones exceeds - the model's context window size, the model will truncate the - response to fit the context window by dropping input items in the - middle of the conversation. 
- * `disabled` (default): If a model response will exceed the context window - size for a model, the request will fail with a 400 error. Is either a Literal[\"auto\"] type or - a Literal[\"disabled\"] type.""" + type: Literal[ItemResourceType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the shell call output. Always ``shell_call_output``. Required.""" id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique identifier for this Response. Required.""" - object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The object type of this resource - always set to ``response``. Required. Default value is - \"response\".""" - status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field( + """The unique ID of the shell call output. Populated when this item is returned via API. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the shell tool call generated by the model. Required.""" + output: list["_models.FunctionShellCallOutputContent"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the response generation. One of ``completed``, ``failed``, - ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: - Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"], - Literal[\"queued\"], Literal[\"incomplete\"]""" - created_at: datetime.datetime = rest_field( - visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" - ) - """Unix timestamp (in seconds) of when this Response was created. 
Required.""" - error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of shell call output contents. Required.""" + max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" - incomplete_details: "_models.ResponseIncompleteDetails1" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Details about why the response is incomplete. Required.""" - output: list["_models.ItemResource"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An array of content items generated by the model. - * The length and order of items in the `output` array is dependent - on the model's response. - * Rather than accessing the first item in the `output` array and - assuming it's an `assistant` message with the content generated by - the model, you might consider using the `output_text` property where - supported in SDKs. Required.""" - instructions: Union[str, list["_models.ItemParam"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A system (or developer) message inserted into the model's context. - When using along with ``previous_response_id``, the instructions from a previous - response will not be carried over to the next response. This makes it simple - to swap out system (or developer) messages in new responses. Required. Is either a str type or - a [ItemParam] type.""" - output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """SDK-only convenience property that contains the aggregated text output - from all ``output_text`` items in the ``output`` array, if any are present. 
- Supported in the Python and JavaScript SDKs.""" - usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to allow the model to run tool calls in parallel. Required.""" - conversation: "_models.ResponseConversation1" = rest_field( + created_by: Optional[str] = rest_field( # pyright: ignore[reportIncompatibleVariableOverride] visibility=["read", "create", "update", "delete", "query"] ) - """Required.""" - agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The agent used for this response.""" - structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The structured inputs to the response that can participate in prompt template substitution or - tool argument bindings.""" + """The identifier of the actor that created the item.""" @overload - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - metadata: dict[str, str], - temperature: float, - top_p: float, - user: str, id: str, # pylint: disable=redefined-builtin - created_at: datetime.datetime, - error: "_models.ResponseError", - incomplete_details: "_models.ResponseIncompleteDetails1", - output: list["_models.ItemResource"], - instructions: Union[str, list["_models.ItemParam"]], - parallel_tool_calls: bool, - conversation: "_models.ResponseConversation1", - service_tier: Optional[Union[str, "_models.ServiceTier"]] = None, - top_logprobs: Optional[int] = None, - previous_response_id: Optional[str] = None, - model: Optional[str] = None, - reasoning: Optional["_models.Reasoning"] = None, - background: Optional[bool] = None, - max_output_tokens: Optional[int] = None, - max_tool_calls: Optional[int] = None, - text: Optional["_models.ResponseText"] = None, - tools: Optional[list["_models.Tool"]] = None, - 
tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = None, - prompt: Optional["_models.Prompt"] = None, - truncation: Optional[Literal["auto", "disabled"]] = None, - status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None, - output_text: Optional[str] = None, - usage: Optional["_models.ResponseUsage"] = None, - agent: Optional["_models.AgentId"] = None, - structured_inputs: Optional[dict[str, Any]] = None, + call_id: str, + output: list["_models.FunctionShellCallOutputContent"], + max_output_length: int, + created_by: Optional[str] = None, ) -> None: ... @overload @@ -10372,99 +8861,64 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.object: Literal["response"] = "response" + self.type = ItemResourceType.SHELL_CALL_OUTPUT # type: ignore -class ResponseStreamEvent(_Model): - """ResponseStreamEvent. +class ItemResourceFunctionToolCallOutputResource( + ItemResource, discriminator="function_call_output" +): # pylint: disable=name-too-long + """ItemResourceFunctionToolCallOutputResource. - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - ResponseErrorEvent, ResponseCodeInterpreterCallCompletedEvent, - ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent, - ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent, - ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, - ResponseCreatedEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent, - ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent, - ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, - ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent, - ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent, - ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallArgumentsDeltaEvent, - ResponseMCPCallArgumentsDoneEvent, ResponseMCPCallCompletedEvent, ResponseMCPCallFailedEvent, - ResponseMCPCallInProgressEvent, ResponseMCPListToolsCompletedEvent, - ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent, - ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseTextDeltaEvent, - ResponseTextDoneEvent, ResponseQueuedEvent, ResponseReasoningDeltaEvent, - ResponseReasoningDoneEvent, ResponseReasoningSummaryDeltaEvent, - ResponseReasoningSummaryDoneEvent, ResponseReasoningSummaryPartAddedEvent, - ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent, - ResponseReasoningSummaryTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent, - ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent, - ResponseWebSearchCallSearchingEvent - - :ivar type: Required. 
Known values are: "response.audio.delta", "response.audio.done", - "response.audio_transcript.delta", "response.audio_transcript.done", - "response.code_interpreter_call_code.delta", "response.code_interpreter_call_code.done", - "response.code_interpreter_call.completed", "response.code_interpreter_call.in_progress", - "response.code_interpreter_call.interpreting", "response.completed", - "response.content_part.added", "response.content_part.done", "response.created", "error", - "response.file_search_call.completed", "response.file_search_call.in_progress", - "response.file_search_call.searching", "response.function_call_arguments.delta", - "response.function_call_arguments.done", "response.in_progress", "response.failed", - "response.incomplete", "response.output_item.added", "response.output_item.done", - "response.refusal.delta", "response.refusal.done", "response.output_text.annotation.added", - "response.output_text.delta", "response.output_text.done", - "response.reasoning_summary_part.added", "response.reasoning_summary_part.done", - "response.reasoning_summary_text.delta", "response.reasoning_summary_text.done", - "response.web_search_call.completed", "response.web_search_call.in_progress", - "response.web_search_call.searching", "response.image_generation_call.completed", - "response.image_generation_call.generating", "response.image_generation_call.in_progress", - "response.image_generation_call.partial_image", "response.mcp_call.arguments_delta", - "response.mcp_call.arguments_done", "response.mcp_call.completed", "response.mcp_call.failed", - "response.mcp_call.in_progress", "response.mcp_list_tools.completed", - "response.mcp_list_tools.failed", "response.mcp_list_tools.in_progress", "response.queued", - "response.reasoning.delta", "response.reasoning.done", "response.reasoning_summary.delta", and - "response.reasoning_summary.done". 
- :vartype type: str or ~azure.ai.projects.models.ResponseStreamEventType - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: The unique ID of the function tool call output. Populated when this item + is returned via API. + :vartype id: str + :ivar type: The type of the function tool call output. Always ``function_call_output``. + Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar output: The output from the function call generated by your code. + Can be a string or an list of output content. Required. Is either a str type or a + [FunctionAndCustomToolCallOutput] type. + :vartype output: str or list[~azure.ai.projects.models.FunctionAndCustomToolCallOutput] + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. 
Known values are: \"response.audio.delta\", \"response.audio.done\", - \"response.audio_transcript.delta\", \"response.audio_transcript.done\", - \"response.code_interpreter_call_code.delta\", \"response.code_interpreter_call_code.done\", - \"response.code_interpreter_call.completed\", \"response.code_interpreter_call.in_progress\", - \"response.code_interpreter_call.interpreting\", \"response.completed\", - \"response.content_part.added\", \"response.content_part.done\", \"response.created\", - \"error\", \"response.file_search_call.completed\", \"response.file_search_call.in_progress\", - \"response.file_search_call.searching\", \"response.function_call_arguments.delta\", - \"response.function_call_arguments.done\", \"response.in_progress\", \"response.failed\", - \"response.incomplete\", \"response.output_item.added\", \"response.output_item.done\", - \"response.refusal.delta\", \"response.refusal.done\", - \"response.output_text.annotation.added\", \"response.output_text.delta\", - \"response.output_text.done\", \"response.reasoning_summary_part.added\", - \"response.reasoning_summary_part.done\", \"response.reasoning_summary_text.delta\", - \"response.reasoning_summary_text.done\", \"response.web_search_call.completed\", - \"response.web_search_call.in_progress\", \"response.web_search_call.searching\", - \"response.image_generation_call.completed\", \"response.image_generation_call.generating\", - \"response.image_generation_call.in_progress\", - \"response.image_generation_call.partial_image\", \"response.mcp_call.arguments_delta\", - \"response.mcp_call.arguments_done\", \"response.mcp_call.completed\", - \"response.mcp_call.failed\", \"response.mcp_call.in_progress\", - \"response.mcp_list_tools.completed\", \"response.mcp_list_tools.failed\", - \"response.mcp_list_tools.in_progress\", \"response.queued\", \"response.reasoning.delta\", - \"response.reasoning.done\", \"response.reasoning_summary.delta\", and - \"response.reasoning_summary.done\".""" - 
sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The sequence number for this event. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call output. Populated when this item + is returned via API.""" + type: Literal[ItemResourceType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool call output. Always ``function_call_output``. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The output from the function call generated by your code. + Can be a string or an list of output content. Required. Is either a str type or a + [FunctionAndCustomToolCallOutput] type.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - type: str, - sequence_number: int, + call_id: str, + output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]], + created_by: Optional[Union["_models.CreatedBy", str]] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... 
@overload @@ -10476,44 +8930,58 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.FUNCTION_CALL_OUTPUT # type: ignore -class ResponseCodeInterpreterCallCodeDeltaEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta" -): # pylint: disable=name-too-long - """Emitted when a partial code snippet is streamed by the code interpreter. +class ItemResourceFunctionToolCallResource(ItemResource, discriminator="function_call"): + """ItemResourceFunctionToolCallResource. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA - :ivar output_index: The index of the output item in the response for which the code is being - streamed. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - :ivar delta: The partial code snippet being streamed by the code interpreter. Required. - :vartype delta: str + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: The unique ID of the function tool call. + :vartype id: str + :ivar type: The type of the function tool call. Always ``function_call``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. 
+ :vartype arguments: str + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code is being streamed. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial code snippet being streamed by the code interpreter. Required.""" + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call.""" + type: Literal[ItemResourceType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool call. Always ``function_call``. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. 
Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, - delta: str, + call_id: str, + name: str, + arguments: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -10525,45 +8993,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA # type: ignore + self.type = ItemResourceType.FUNCTION_CALL # type: ignore -class ResponseCodeInterpreterCallCodeDoneEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done" -): - """Emitted when the code snippet is finalized by the code interpreter. +class ItemResourceImageGenToolCall(ItemResource, discriminator="image_generation_call"): + """Image generation call. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE - :ivar output_index: The index of the output item in the response for which the code is - finalized. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. 
- :vartype item_id: str - :ivar code: The final code snippet output by the code interpreter. Required. - :vartype code: str + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the image generation call. Always ``image_generation_call``. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar id: The unique ID of the image generation call. Required. + :vartype id: str + :ivar status: The status of the image generation call. Required. Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"] + :vartype status: str or str or str or str + :ivar result: Required. + :vartype result: str """ - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call_code.done``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code is finalized. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The final code snippet output by the code interpreter. Required.""" + type: Literal[ItemResourceType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the image generation call. Always ``image_generation_call``. 
Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the image generation call. Required.""" + status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the image generation call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, - code: str, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "generating", "failed"], + result: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... @overload @@ -10575,41 +9044,51 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE # type: ignore + self.type = ItemResourceType.IMAGE_GENERATION_CALL # type: ignore -class ResponseCodeInterpreterCallCompletedEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.completed" -): # pylint: disable=name-too-long - """Emitted when the code interpreter call is completed. +class ItemResourceLocalShellToolCall(ItemResource, discriminator="local_shell_call"): + """Local shell call. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``. - Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED - :ivar output_index: The index of the output item in the response for which the code interpreter - call is completed. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str - """ - - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.completed``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter call is completed. - Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the local shell call. Always ``local_shell_call``. Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar id: The unique ID of the local shell call. Required. + :vartype id: str + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.LocalShellExecAction + :ivar status: The status of the local shell call. Required. 
Is one of the following types: + Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + """ + + type: Literal[ItemResourceType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell call. Always ``local_shell_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell call. Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the local shell call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + id: str, # pylint: disable=redefined-builtin + call_id: str, + action: "_models.LocalShellExecAction", + status: Literal["in_progress", "completed", "incomplete"], + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... 
@overload @@ -10621,41 +9100,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED # type: ignore + self.type = ItemResourceType.LOCAL_SHELL_CALL # type: ignore -class ResponseCodeInterpreterCallInProgressEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress" -): # pylint: disable=name-too-long - """Emitted when a code interpreter call is in progress. +class ItemResourceLocalShellToolCallOutput(ItemResource, discriminator="local_shell_call_output"): + """Local shell call output. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``. + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response for which the code interpreter - call is in progress. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar id: The unique ID of the local shell tool call generated by the model. Required. + :vartype id: str + :ivar output: A JSON string of the output of the local shell tool call. Required. 
+ :vartype output: str + :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"], + Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter call is in - progress. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" + type: Literal[ItemResourceType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. 
Required.""" + status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + id: str, # pylint: disable=redefined-builtin + output: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + status: Optional[Literal["in_progress", "completed", "incomplete"]] = None, ) -> None: ... @overload @@ -10667,41 +9152,98 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS # type: ignore + self.type = ItemResourceType.LOCAL_SHELL_CALL_OUTPUT # type: ignore -class ResponseCodeInterpreterCallInterpretingEvent( - ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting" -): # pylint: disable=name-too-long - """Emitted when the code interpreter is actively interpreting the code snippet. +class ItemResourceMcpApprovalRequest(ItemResource, discriminator="mcp_approval_request"): + """MCP approval request. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING - :ivar output_index: The index of the output item in the response for which the code interpreter - is interpreting code. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the code interpreter tool call item. Required. - :vartype item_id: str + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. 
+ :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the item. Always ``mcp_approval_request``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar id: The unique ID of the approval request. Required. + :vartype id: str + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. + :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str + """ + + type: Literal[ItemResourceType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_approval_request``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the approval request. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemResourceType.MCP_APPROVAL_REQUEST # type: ignore + + +class ItemResourceMcpApprovalResponseResource(ItemResource, discriminator="mcp_approval_response"): + """MCP approval response. + + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the item. Always ``mcp_approval_response``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar id: The unique ID of the approval response. Required. + :vartype id: str + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: + :vartype reason: str """ - type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response for which the code interpreter is interpreting - code. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the code interpreter tool call item. Required.""" + type: Literal[ItemResourceType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_approval_response``. 
Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the approval response. Required.""" + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + id: str, # pylint: disable=redefined-builtin + approval_request_id: str, + approve: bool, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + reason: Optional[str] = None, ) -> None: ... @overload @@ -10713,31 +9255,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING # type: ignore + self.type = ItemResourceType.MCP_APPROVAL_RESPONSE # type: ignore -class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"): - """Emitted when the model response is complete. +class ItemResourceMcpListTools(ItemResource, discriminator="mcp_list_tools"): + """MCP list tools. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_COMPLETED - :ivar response: Properties of the completed response. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. 
+ :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the item. Always ``mcp_list_tools``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar id: The unique ID of the list. Required. + :vartype id: str + :ivar server_label: The label of the MCP server. Required. + :vartype server_label: str + :ivar tools: The tools available on the server. Required. + :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: + :vartype error: str """ - type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.completed``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Properties of the completed response. Required.""" + type: Literal[ItemResourceType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_list_tools``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the list. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + id: str, # pylint: disable=redefined-builtin + server_label: str, + tools: list["_models.MCPListToolsTool"], + created_by: Optional[Union["_models.CreatedBy", str]] = None, + error: Optional[str] = None, ) -> None: ... 
@overload @@ -10749,46 +9306,70 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_COMPLETED # type: ignore + self.type = ItemResourceType.MCP_LIST_TOOLS # type: ignore -class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"): - """Emitted when a new content part is added. +class ItemResourceMcpToolCall(ItemResource, discriminator="mcp_call"): + """MCP tool call. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.content_part.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_ADDED - :ivar item_id: The ID of the output item that the content part was added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the content part was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that was added. Required. - :vartype content_index: int - :ivar part: The content part that was added. Required. - :vartype part: ~azure.ai.projects.models.ItemContent + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: The type of the item. Always ``mcp_call``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar id: The unique ID of the tool call. Required. + :vartype id: str + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments passed to the tool. Required. 
+ :vartype arguments: str + :ivar output: + :vartype output: str + :ivar error: + :vartype error: str + :ivar status: The status of the tool call. One of ``in_progress``, ``completed``, + ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed", + "incomplete", "calling", and "failed". + :vartype status: str or ~azure.ai.projects.models.MCPToolCallStatus + :ivar approval_request_id: + :vartype approval_request_id: str """ - type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.content_part.added``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the content part was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the content part was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that was added. Required.""" - part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content part that was added. Required.""" + type: Literal[ItemResourceType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the item. Always ``mcp_call``. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the tool call. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. 
Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``, + ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\", + \"calling\", and \"failed\".""" + approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - part: "_models.ItemContent", + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + output: Optional[str] = None, + error: Optional[str] = None, + status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None, + approval_request_id: Optional[str] = None, ) -> None: ... @overload @@ -10800,46 +9381,55 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED # type: ignore + self.type = ItemResourceType.MCP_CALL # type: ignore -class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"): - """Emitted when a content part is done. 
+class ItemResourceOutputMessage(ItemResource, discriminator="output_message"): + """Output message. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.content_part.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_DONE - :ivar item_id: The ID of the output item that the content part was added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the content part was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that is done. Required. - :vartype content_index: int - :ivar part: The content part that is done. Required. - :vartype part: ~azure.ai.projects.models.ItemContent + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: The unique ID of the output message. Required. + :vartype id: str + :ivar type: The type of the output message. Always ``message``. Required. + :vartype type: str or ~azure.ai.projects.models.OUTPUT_MESSAGE + :ivar role: The role of the output message. Always ``assistant``. Required. Default value is + "assistant". + :vartype role: str + :ivar content: The content of the output message. Required. + :vartype content: list[~azure.ai.projects.models.OutputMessageContent] + :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when input items are returned via API. Required. Is one of the + following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str """ - type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. 
Always ``response.content_part.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the content part was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the content part was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that is done. Required.""" - part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content part that is done. Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the output message. Required.""" + type: Literal[ItemResourceType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output message. Always ``message``. Required.""" + role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\".""" + content: list["_models.OutputMessageContent"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content of the output message. Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the message input. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when input items are returned via API. Required. 
Is one of the + following types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - part: "_models.ItemContent", + id: str, # pylint: disable=redefined-builtin + content: list["_models.OutputMessageContent"], + status: Literal["in_progress", "completed", "incomplete"], + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... @overload @@ -10851,24 +9441,57 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE # type: ignore + self.type = ItemResourceType.OUTPUT_MESSAGE # type: ignore + self.role: Literal["assistant"] = "assistant" -class ResponseConversation1(_Model): - """ResponseConversation1. +class ItemResourceWebSearchToolCall(ItemResource, discriminator="web_search_call"): + """Web search tool call. - :ivar id: Required. + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: The unique ID of the web search tool call. Required. :vartype id: str + :ivar type: The type of the web search tool call. Always ``web_search_call``. Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar status: The status of the web search tool call. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] + :vartype status: str or str or str or str + :ivar action: An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. 
Is one of + the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind + :vartype action: ~azure.ai.projects.models.WebSearchActionSearch or + ~azure.ai.projects.models.WebSearchActionOpenPage or + ~azure.ai.projects.models.WebSearchActionFind """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" + """The unique ID of the web search tool call. Required.""" + type: Literal[ItemResourceType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the web search tool call. Always ``web_search_call``. Required.""" + status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the web search tool call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" + action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = ( + rest_field(visibility=["read", "create", "update", "delete", "query"]) + ) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. Is one of + the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind""" @overload def __init__( self, *, id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "failed"], + action: Union[ + "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind" + ], + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... 
@overload @@ -10880,30 +9503,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.WEB_SEARCH_CALL # type: ignore -class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"): - """An event that is emitted when a response is created. +class KeyPressAction(ComputerAction, discriminator="keypress"): + """KeyPress. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.created``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_CREATED - :ivar response: The response that was created. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar type: Specifies the event type. For a keypress action, this property is always set to + ``keypress``. Required. + :vartype type: str or ~azure.ai.projects.models.KEYPRESS + :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an + array of strings, each representing a key. Required. + :vartype keys_property: list[str] """ - type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.created``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that was created. Required.""" + type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a keypress action, this property is always set to ``keypress``. 
+ Required.""" + keys_property: list[str] = rest_field( + name="keys", visibility=["read", "create", "update", "delete", "query"], original_tsp_name="keys" + ) + """The combination of keys the model is requesting to be pressed. This is an array of strings, + each representing a key. Required.""" @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + keys_property: list[str], ) -> None: ... @overload @@ -10915,41 +9542,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_CREATED # type: ignore + self.type = ComputerActionType.KEYPRESS # type: ignore -class ResponseError(_Model): - """An error object returned when the model fails to generate a Response. +class LocalShellExecAction(_Model): + """Local shell exec action. - :ivar code: Required. Known values are: "server_error", "rate_limit_exceeded", - "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format", - "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small", - "image_parse_error", "image_content_policy_violation", "invalid_image_mode", - "image_file_too_large", "unsupported_image_media_type", "empty_image_file", - "failed_to_download_image", and "image_file_not_found". - :vartype code: str or ~azure.ai.projects.models.ResponseErrorCode - :ivar message: A human-readable description of the error. Required. - :vartype message: str + :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is + "exec". + :vartype type: str + :ivar command: The command to run. Required. + :vartype command: list[str] + :ivar timeout_ms: + :vartype timeout_ms: int + :ivar working_directory: + :vartype working_directory: str + :ivar env: Environment variables to set for the command. Required. 
+ :vartype env: dict[str, str] + :ivar user: + :vartype user: str """ - code: Union[str, "_models.ResponseErrorCode"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """Required. Known values are: \"server_error\", \"rate_limit_exceeded\", \"invalid_prompt\", - \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\", - \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\", - \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\", - \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\", - \"failed_to_download_image\", and \"image_file_not_found\".""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A human-readable description of the error. Required.""" + type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\".""" + command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The command to run. Required.""" + timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Environment variables to set for the command. Required.""" + user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - code: Union[str, "_models.ResponseErrorCode"], - message: str, + command: list[str], + env: dict[str, str], + timeout_ms: Optional[int] = None, + working_directory: Optional[str] = None, + user: Optional[str] = None, ) -> None: ... 
@overload @@ -10961,40 +9593,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type: Literal["exec"] = "exec" -class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"): - """Emitted when an error occurs. +class LocalShellToolParam(Tool, discriminator="local_shell"): + """Local shell tool. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``error``. Required. - :vartype type: str or ~azure.ai.projects.models.ERROR - :ivar code: The error code. Required. - :vartype code: str - :ivar message: The error message. Required. - :vartype message: str - :ivar param: The error parameter. Required. - :vartype param: str + :ivar type: The type of the local shell tool. Always ``local_shell``. Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL """ - type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``error``. Required.""" - code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error code. Required.""" - message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error message. Required.""" - param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The error parameter. Required.""" + type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell tool. Always ``local_shell``. Required.""" @overload def __init__( self, - *, - sequence_number: int, - code: str, - message: str, - param: str, ) -> None: ... 
@overload @@ -11006,31 +9620,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.ERROR # type: ignore + self.type = ToolType.LOCAL_SHELL # type: ignore -class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"): - """An event that is emitted when a response fails. +class LogProb(_Model): + """Log probability. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.failed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FAILED - :ivar response: The response that failed. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar token: Required. + :vartype token: str + :ivar logprob: Required. + :vartype logprob: float + :ivar bytes: Required. + :vartype bytes: list[int] + :ivar top_logprobs: Required. + :vartype top_logprobs: list[~azure.ai.projects.models.TopLogProb] """ - type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.failed``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that failed. 
Required.""" + token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + token: str, + logprob: float, + bytes: list[int], + top_logprobs: list["_models.TopLogProb"], ) -> None: ... @overload @@ -11042,37 +9664,39 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FAILED # type: ignore -class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"): - """Emitted when a file search call is completed (results found). +class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): + """Managed Azure AI Search Index Definition. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED - :ivar output_index: The index of the output item that the file search call is initiated. - Required. - :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. 
+ :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Managed Azure Search + :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH + :ivar vector_store_id: Vector store id of managed index. Required. + :vartype vector_store_id: str """ - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.completed``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is initiated. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" + type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Managed Azure Search""" + vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) + """Vector store id of managed index. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + vector_store_id: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -11084,37 +9708,41 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED # type: ignore + self.type = IndexType.MANAGED_AZURE_SEARCH # type: ignore -class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"): - """Emitted when a file search call is initiated. +class MCPListToolsTool(_Model): + """MCP list tools tool. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS - :ivar output_index: The index of the output item that the file search call is initiated. - Required. - :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str + :ivar name: The name of the tool. Required. + :vartype name: str + :ivar description: + :vartype description: str + :ivar input_schema: The JSON schema describing the tool's input. Required. + :vartype input_schema: ~azure.ai.projects.models.MCPListToolsToolInputSchema + :ivar annotations: + :vartype annotations: ~azure.ai.projects.models.MCPListToolsToolAnnotations """ - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.in_progress``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is initiated. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + input_schema: "_models.MCPListToolsToolInputSchema" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The JSON schema describing the tool's input. Required.""" + annotations: Optional["_models.MCPListToolsToolAnnotations"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + name: str, + input_schema: "_models.MCPListToolsToolInputSchema", + description: Optional[str] = None, + annotations: Optional["_models.MCPListToolsToolAnnotations"] = None, ) -> None: ... @overload @@ -11126,37 +9754,144 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS # type: ignore -class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"): - """Emitted when a file search is currently searching. +class MCPListToolsToolAnnotations(_Model): + """MCPListToolsToolAnnotations.""" - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING - :ivar output_index: The index of the output item that the file search call is searching. - Required. 
- :vartype output_index: int - :ivar item_id: The ID of the output item that the file search call is initiated. Required. - :vartype item_id: str + +class MCPListToolsToolInputSchema(_Model): + """MCPListToolsToolInputSchema.""" + + +class MCPTool(Tool, discriminator="mcp"): + """MCP tool. + + :ivar type: The type of the MCP tool. Always ``mcp``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. + :vartype server_label: str + :ivar server_url: The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be + provided. + :vartype server_url: str + :ivar connector_id: Identifier for service connectors, like those available in ChatGPT. One of + ``server_url`` or ``connector_id`` must be provided. Learn more about service + connectors `here + `_. + Currently supported ``connector_id`` values are: + + * Dropbox: `connector_dropbox` + * Gmail: `connector_gmail` + * Google Calendar: `connector_googlecalendar` + * Google Drive: `connector_googledrive` + * Microsoft Teams: `connector_microsoftteams` + * Outlook Calendar: `connector_outlookcalendar` + * Outlook Email: `connector_outlookemail` + * SharePoint: `connector_sharepoint`. Is one of the following types: + Literal["connector_dropbox"], Literal["connector_gmail"], Literal["connector_googlecalendar"], + Literal["connector_googledrive"], Literal["connector_microsoftteams"], + Literal["connector_outlookcalendar"], Literal["connector_outlookemail"], + Literal["connector_sharepoint"] + :vartype connector_id: str or str or str or str or str or str or str or str + :ivar authorization: An OAuth access token that can be used with a remote MCP server, either + with a custom MCP server URL or a service connector. Your application + must handle the OAuth authorization flow and provide the token here. 
+ :vartype authorization: str + :ivar server_description: Optional description of the MCP server, used to provide more context. + :vartype server_description: str + :ivar headers: + :vartype headers: dict[str, str] + :ivar allowed_tools: Is either a [str] type or a MCPToolFilter type. + :vartype allowed_tools: list[str] or ~azure.ai.projects.models.MCPToolFilter + :ivar require_approval: Is one of the following types: MCPToolRequireApproval, + Literal["always"], Literal["never"] + :vartype require_approval: ~azure.ai.projects.models.MCPToolRequireApproval or str or str + :ivar project_connection_id: The connection ID in the project for the MCP server. The + connection stores authentication and other connection details needed to connect to the MCP + server. + :vartype project_connection_id: str """ - type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.file_search_call.searching``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the file search call is searching. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the file search call is initiated. Required.""" + type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the MCP tool. Always ``mcp``. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A label for this MCP server, used to identify it in tool calls. Required.""" + server_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL for the MCP server. 
One of ``server_url`` or ``connector_id`` must be + provided.""" + connector_id: Optional[ + Literal[ + "connector_dropbox", + "connector_gmail", + "connector_googlecalendar", + "connector_googledrive", + "connector_microsoftteams", + "connector_outlookcalendar", + "connector_outlookemail", + "connector_sharepoint", + ] + ] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Identifier for service connectors, like those available in ChatGPT. One of + ``server_url`` or ``connector_id`` must be provided. Learn more about service + connectors `here + `_. + Currently supported ``connector_id`` values are: + + * Dropbox: `connector_dropbox` + * Gmail: `connector_gmail` + * Google Calendar: `connector_googlecalendar` + * Google Drive: `connector_googledrive` + * Microsoft Teams: `connector_microsoftteams` + * Outlook Calendar: `connector_outlookcalendar` + * Outlook Email: `connector_outlookemail` + * SharePoint: `connector_sharepoint`. Is one of the following types: + Literal[\"connector_dropbox\"], Literal[\"connector_gmail\"], + Literal[\"connector_googlecalendar\"], Literal[\"connector_googledrive\"], + Literal[\"connector_microsoftteams\"], Literal[\"connector_outlookcalendar\"], + Literal[\"connector_outlookemail\"], Literal[\"connector_sharepoint\"]""" + authorization: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An OAuth access token that can be used with a remote MCP server, either + with a custom MCP server URL or a service connector. 
Your application + must handle the OAuth authorization flow and provide the token here.""" + server_description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional description of the MCP server, used to provide more context.""" + headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is either a [str] type or a MCPToolFilter type.""" + require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = ( + rest_field(visibility=["read", "create", "update", "delete", "query"]) + ) + """Is one of the following types: MCPToolRequireApproval, Literal[\"always\"], Literal[\"never\"]""" + project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The connection ID in the project for the MCP server. The connection stores authentication and + other connection details needed to connect to the MCP server.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + server_label: str, + server_url: Optional[str] = None, + connector_id: Optional[ + Literal[ + "connector_dropbox", + "connector_gmail", + "connector_googlecalendar", + "connector_googledrive", + "connector_microsoftteams", + "connector_outlookcalendar", + "connector_outlookemail", + "connector_sharepoint", + ] + ] = None, + authorization: Optional[str] = None, + server_description: Optional[str] = None, + headers: Optional[dict[str, str]] = None, + allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = None, + require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = None, + project_connection_id: Optional[str] = None, ) -> None: ... 
@overload @@ -11168,45 +9903,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING # type: ignore + self.type = ToolType.MCP # type: ignore -class ResponseFunctionCallArgumentsDeltaEvent( - ResponseStreamEvent, discriminator="response.function_call_arguments.delta" -): - """Emitted when there is a partial function-call arguments delta. +class MCPToolFilter(_Model): + """MCP tool filter. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA - :ivar item_id: The ID of the output item that the function-call arguments delta is added to. - Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the function-call arguments delta is - added to. Required. - :vartype output_index: int - :ivar delta: The function-call arguments delta that is added. Required. - :vartype delta: str + :ivar tool_names: MCP allowed tools. + :vartype tool_names: list[str] + :ivar read_only: Indicates whether or not a tool modifies data or is read-only. If an + MCP server is `annotated with `readOnlyHint` + `_, + it will match this filter. + :vartype read_only: bool """ - type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.function_call_arguments.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the function-call arguments delta is added to. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the function-call arguments delta is added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The function-call arguments delta that is added. Required.""" + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """MCP allowed tools.""" + read_only: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether or not a tool modifies data or is read-only. If an + MCP server is `annotated with `readOnlyHint` + `_, + it will match this filter.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - delta: str, + tool_names: Optional[list[str]] = None, + read_only: Optional[bool] = None, ) -> None: ... @overload @@ -11218,43 +9943,26 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA # type: ignore -class ResponseFunctionCallArgumentsDoneEvent( - ResponseStreamEvent, discriminator="response.function_call_arguments.done" -): - """Emitted when function-call arguments are finalized. +class MCPToolRequireApproval(_Model): + """MCPToolRequireApproval. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE - :ivar item_id: The ID of the item. Required. - :vartype item_id: str - :ivar output_index: The index of the output item. Required. - :vartype output_index: int - :ivar arguments: The function-call arguments. Required. 
- :vartype arguments: str + :ivar always: + :vartype always: ~azure.ai.projects.models.MCPToolFilter + :ivar never: + :vartype never: ~azure.ai.projects.models.MCPToolFilter """ - type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item. Required.""" - arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The function-call arguments. Required.""" + always: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + never: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - arguments: str, + always: Optional["_models.MCPToolFilter"] = None, + never: Optional["_models.MCPToolFilter"] = None, ) -> None: ... @overload @@ -11266,36 +9974,32 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE # type: ignore -class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"): - """Emitted when an image generation tool call has completed and the final image is available. +class MemoryOperation(_Model): + """Represents a single memory operation (create, update, or delete) performed on a memory item. - :ivar sequence_number: The sequence number for this event. Required. 
- :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. - :vartype item_id: str + :ivar kind: The type of memory operation being performed. Required. Known values are: "create", + "update", and "delete". + :vartype kind: str or ~azure.ai.projects.models.MemoryOperationKind + :ivar memory_item: The memory item to create, update, or delete. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem """ - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.completed'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" + kind: Union[str, "_models.MemoryOperationKind"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of memory operation being performed. Required. Known values are: \"create\", + \"update\", and \"delete\".""" + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory item to create, update, or delete. 
Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + kind: Union[str, "_models.MemoryOperationKind"], + memory_item: "_models.MemoryItem", ) -> None: ... @overload @@ -11307,40 +10011,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED # type: ignore -class ResponseImageGenCallGeneratingEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.generating" -): - """Emitted when an image generation tool call is actively generating an image (intermediate - state). +class MemorySearchItem(_Model): + """A retrieved memory item from memory search. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.generating'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. - :vartype item_id: str + :ivar memory_item: Retrieved memory item. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem """ - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.generating'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Retrieved memory item. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + memory_item: "_models.MemoryItem", ) -> None: ... @overload @@ -11352,39 +10039,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING # type: ignore -class ResponseImageGenCallInProgressEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.in_progress" -): - """Emitted when an image generation tool call is in progress. +class MemorySearchOptions(_Model): + """Memory search options. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.in_progress'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. - :vartype item_id: str + :ivar max_memories: Maximum number of memory items to return. + :vartype max_memories: int """ - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.in_progress'. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" + max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum number of memory items to return.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + max_memories: Optional[int] = None, ) -> None: ... @overload @@ -11396,52 +10067,49 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS # type: ignore -class ResponseImageGenCallPartialImageEvent( - ResponseStreamEvent, discriminator="response.image_generation_call.partial_image" -): - """Emitted when a partial image is available during image generation streaming. +class MemorySearchPreviewTool(Tool, discriminator="memory_search"): + """A tool for integrating memories into the agent. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.image_generation_call.partial_image'. - Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the image generation item being processed. Required. - :vartype item_id: str - :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is - 0-based for the user). Required. 
- :vartype partial_image_index: int - :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image. - Required. - :vartype partial_image_b64: str + :ivar type: The type of the tool. Always ``memory_search``. Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH + :ivar memory_store_name: The name of the memory store to use. Required. + :vartype memory_store_name: str + :ivar scope: The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. + :vartype scope: str + :ivar search_options: Options for searching the memory store. + :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions + :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default + 300. + :vartype update_delay: int """ - type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.image_generation_call.partial_image'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the image generation item being processed. Required.""" - partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """0-based index for the partial image (backend is 1-based, but this is 0-based for the user). - Required.""" - partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Base64-encoded partial image data, suitable for rendering as an image. 
Required.""" + type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``memory_search``. Required.""" + memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store to use. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required.""" + search_options: Optional["_models.MemorySearchOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Options for searching the memory store.""" + update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Time to wait before updating memories after inactivity (seconds). Default 300.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, - partial_image_index: int, - partial_image_b64: str, + memory_store_name: str, + scope: str, + search_options: Optional["_models.MemorySearchOptions"] = None, + update_delay: Optional[int] = None, ) -> None: ... @overload @@ -11453,28 +10121,47 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE # type: ignore + self.type = ToolType.MEMORY_SEARCH # type: ignore -class ResponseIncompleteDetails1(_Model): - """ResponseIncompleteDetails1. +class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"): + """MemorySearchToolCallItemResource. - :ivar reason: The reason why the response is incomplete. 
Is either a - Literal["max_output_tokens"] type or a Literal["content_filter"] type. - :vartype reason: str or str + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL + :ivar status: The status of the memory search tool call. One of ``in_progress``, + ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following + types: Literal["in_progress"], Literal["searching"], Literal["completed"], + Literal["incomplete"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar results: The results returned from the memory search. + :vartype results: list[~azure.ai.projects.models.MemorySearchItem] """ - reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field( + type: Literal[ItemResourceType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the memory search tool call. One of ``in_progress``, + ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + results: Optional[list["_models.MemorySearchItem"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The reason why the response is incomplete. 
Is either a Literal[\"max_output_tokens\"] type or a - Literal[\"content_filter\"] type.""" + """The results returned from the memory search.""" @overload def __init__( self, *, - reason: Optional[Literal["max_output_tokens", "content_filter"]] = None, + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + created_by: Optional[Union["_models.CreatedBy", str]] = None, + results: Optional[list["_models.MemorySearchItem"]] = None, ) -> None: ... @overload @@ -11486,30 +10173,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ItemResourceType.MEMORY_SEARCH_CALL # type: ignore -class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"): - """An event that is emitted when a response finishes as incomplete. +class MemoryStoreDefinition(_Model): + """Base definition for memory store configurations. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MemoryStoreDefaultDefinition - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.incomplete``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_INCOMPLETE - :ivar response: The response that was incomplete. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar kind: The kind of the memory store. Required. "default" + :vartype kind: str or ~azure.ai.projects.models.MemoryStoreKind """ - type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.incomplete``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that was incomplete. 
Required.""" + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of the memory store. Required. \"default\"""" @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + kind: str, ) -> None: ... @overload @@ -11521,31 +10206,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore -class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"): - """Emitted when the response is in progress. +class MemoryStoreDefaultDefinition(MemoryStoreDefinition, discriminator="default"): + """Default memory store implementation. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_IN_PROGRESS - :ivar response: The response that is in progress. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar kind: The kind of the memory store. Required. The default memory store implementation. + :vartype kind: str or ~azure.ai.projects.models.DEFAULT + :ivar chat_model: The name or identifier of the chat completion model deployment used for + memory processing. Required. + :vartype chat_model: str + :ivar embedding_model: The name or identifier of the embedding model deployment used for memory + processing. Required. + :vartype embedding_model: str + :ivar options: Default memory store options. 
+ :vartype options: ~azure.ai.projects.models.MemoryStoreDefaultOptions """ - type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.in_progress``. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The response that is in progress. Required.""" + kind: Literal[MemoryStoreKind.DEFAULT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory store. Required. The default memory store implementation.""" + chat_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the chat completion model deployment used for memory processing. + Required.""" + embedding_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the embedding model deployment used for memory processing. Required.""" + options: Optional["_models.MemoryStoreDefaultOptions"] = rest_field(visibility=["read", "create"]) + """Default memory store options.""" @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + chat_model: str, + embedding_model: str, + options: Optional["_models.MemoryStoreDefaultOptions"] = None, ) -> None: ... @overload @@ -11557,41 +10251,37 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore + self.kind = MemoryStoreKind.DEFAULT # type: ignore -class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_delta"): - """Emitted when there is a delta (partial update) to the arguments of an MCP tool call. 
+class MemoryStoreDefaultOptions(_Model): + """Default memory store configurations. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.arguments_delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str - :ivar delta: The partial update to the arguments for the MCP tool call. Required. - :vartype delta: any + :ivar user_profile_enabled: Whether to enable user profile extraction and storage. Default is + true. Required. + :vartype user_profile_enabled: bool + :ivar user_profile_details: Specific categories or types of user profile information to extract + and store. + :vartype user_profile_details: str + :ivar chat_summary_enabled: Whether to enable chat summary extraction and storage. Default is + true. Required. + :vartype chat_summary_enabled: bool """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.arguments_delta'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the arguments for the MCP tool call. 
Required.""" + user_profile_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable user profile extraction and storage. Default is true. Required.""" + user_profile_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specific categories or types of user profile information to extract and store.""" + chat_summary_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable chat summary extraction and storage. Default is true. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, - delta: Any, + user_profile_enabled: bool, + chat_summary_enabled: bool, + user_profile_details: Optional[str] = None, ) -> None: ... @overload @@ -11603,41 +10293,41 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore -class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_done"): - """Emitted when the arguments for an MCP tool call are finalized. +class MemoryStoreDeleteScopeResult(_Model): + """Response for deleting memories from a scope. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.arguments_done'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str - :ivar arguments: The finalized arguments for the MCP tool call. Required. 
- :vartype arguments: any + :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is + "memory_store.scope.deleted". + :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar scope: The scope from which memories were deleted. Required. + :vartype scope: str + :ivar deleted: Whether the deletion operation was successful. Required. + :vartype deleted: bool """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.arguments_done'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" - arguments: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized arguments for the MCP tool call. Required.""" + object: Literal["memory_store.scope.deleted"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'memory_store.scope.deleted'. Required. Default value is + \"memory_store.scope.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The scope from which memories were deleted. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the deletion operation was successful. 
Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, - arguments: Any, + name: str, + scope: str, + deleted: bool, ) -> None: ... @overload @@ -11649,26 +10339,64 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore + self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" -class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"): - """Emitted when an MCP tool call has completed successfully. +class MemoryStoreDetails(_Model): + """A memory store that can store and retrieve user memories. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_COMPLETED + :ivar object: The object type, which is always 'memory_store'. Required. Default value is + "memory_store". + :vartype object: str + :ivar id: The unique identifier of the memory store. Required. + :vartype id: str + :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The Unix timestamp (seconds) when the memory store was last updated. + Required. + :vartype updated_at: ~datetime.datetime + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar description: A human-readable description of the memory store. + :vartype description: str + :ivar metadata: Arbitrary key-value metadata to associate with the memory store. + :vartype metadata: dict[str, str] + :ivar definition: The definition of the memory store. Required. 
+ :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.completed'. Required.""" + object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'memory_store'. Required. Default value is \"memory_store\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the memory store. Required.""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was created. Required.""" + updated_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was last updated. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the memory store.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Arbitrary key-value metadata to associate with the memory store.""" + definition: "_models.MemoryStoreDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The definition of the memory store. 
Required.""" @overload def __init__( self, *, - sequence_number: int, + id: str, # pylint: disable=redefined-builtin + created_at: datetime.datetime, + updated_at: datetime.datetime, + name: str, + definition: "_models.MemoryStoreDefinition", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, ) -> None: ... @overload @@ -11680,26 +10408,53 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore + self.object: Literal["memory_store"] = "memory_store" -class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"): - """Emitted when an MCP tool call has failed. +class MemoryStoreOperationUsage(_Model): + """Usage statistics of a memory store operation. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_FAILED + :ivar embedding_tokens: The number of embedding tokens. Required. + :vartype embedding_tokens: int + :ivar input_tokens: The number of input tokens. Required. + :vartype input_tokens: int + :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. + :vartype input_tokens_details: ~azure.ai.projects.models.ResponseUsageInputTokensDetails + :ivar output_tokens: The number of output tokens. Required. + :vartype output_tokens: int + :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. + :vartype output_tokens_details: ~azure.ai.projects.models.ResponseUsageOutputTokensDetails + :ivar total_tokens: The total number of tokens used. Required. 
+ :vartype total_tokens: int """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.failed'. Required.""" + embedding_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of embedding tokens. Required.""" + input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of input tokens. Required.""" + input_tokens_details: "_models.ResponseUsageInputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the input tokens. Required.""" + output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of output tokens. Required.""" + output_tokens_details: "_models.ResponseUsageOutputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the output tokens. Required.""" + total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The total number of tokens used. Required.""" @overload def __init__( self, *, - sequence_number: int, + embedding_tokens: int, + input_tokens: int, + input_tokens_details: "_models.ResponseUsageInputTokensDetails", + output_tokens: int, + output_tokens_details: "_models.ResponseUsageOutputTokensDetails", + total_tokens: int, ) -> None: ... @overload @@ -11711,36 +10466,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore -class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"): - """Emitted when an MCP tool call is in progress. 
+class MemoryStoreSearchResult(_Model): + """Memory search response. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_IN_PROGRESS - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. - :vartype item_id: str + :ivar search_id: The unique ID of this search request. Use this value as previous_search_id in + subsequent requests to perform incremental searches. Required. + :vartype search_id: str + :ivar memories: Related memory items found during the search operation. Required. + :vartype memories: list[~azure.ai.projects.models.MemorySearchItem] + :ivar usage: Usage statistics associated with the memory search operation. Required. + :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_call.in_progress'. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the MCP tool call item being processed. Required.""" + search_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this search request. Use this value as previous_search_id in subsequent + requests to perform incremental searches. 
Required.""" + memories: list["_models.MemorySearchItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Related memory items found during the search operation. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory search operation. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + search_id: str, + memories: list["_models.MemorySearchItem"], + usage: "_models.MemoryStoreOperationUsage", ) -> None: ... @overload @@ -11752,26 +10506,31 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore -class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"): - """Emitted when the list of available MCP tools has been successfully retrieved. +class MemoryStoreUpdateCompletedResult(_Model): + """Memory update result. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED + :ivar memory_operations: A list of individual memory operations that were performed during the + update. Required. + :vartype memory_operations: list[~azure.ai.projects.models.MemoryOperation] + :ivar usage: Usage statistics associated with the memory update operation. Required. 
+ :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.completed'. Required.""" + memory_operations: list["_models.MemoryOperation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of individual memory operations that were performed during the update. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory update operation. Required.""" @overload def __init__( self, *, - sequence_number: int, + memory_operations: list["_models.MemoryOperation"], + usage: "_models.MemoryStoreOperationUsage", ) -> None: ... @overload @@ -11783,26 +10542,53 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore -class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"): - """Emitted when the attempt to list available MCP tools has failed. +class MemoryStoreUpdateResult(_Model): + """Provides the status of a memory store update operation. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_FAILED + :ivar update_id: The unique ID of this update request. Use this value as previous_update_id in + subsequent requests to perform incremental updates. Required. 
+ :vartype update_id: str + :ivar status: The status of the memory update operation. One of "queued", "in_progress", + "completed", "failed", or "superseded". Required. Known values are: "queued", "in_progress", + "completed", "failed", and "superseded". + :vartype status: str or ~azure.ai.projects.models.MemoryStoreUpdateStatus + :ivar superseded_by: The update_id the operation was superseded by when status is "superseded". + :vartype superseded_by: str + :ivar result: The result of memory store update operation when status is "completed". + :vartype result: ~azure.ai.projects.models.MemoryStoreUpdateCompletedResult + :ivar error: Error object that describes the error when status is "failed". + :vartype error: ~azure.ai.projects.models.Error """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.failed'. Required.""" + update_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this update request. Use this value as previous_update_id in subsequent + requests to perform incremental updates. Required.""" + status: Union[str, "_models.MemoryStoreUpdateStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the memory update operation. One of \"queued\", \"in_progress\", \"completed\", + \"failed\", or \"superseded\". Required. 
Known values are: \"queued\", \"in_progress\", + \"completed\", \"failed\", and \"superseded\".""" + superseded_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The update_id the operation was superseded by when status is \"superseded\".""" + result: Optional["_models.MemoryStoreUpdateCompletedResult"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The result of memory store update operation when status is \"completed\".""" + error: Optional["_models.Error"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error object that describes the error when status is \"failed\".""" @overload def __init__( self, *, - sequence_number: int, + update_id: str, + status: Union[str, "_models.MemoryStoreUpdateStatus"], + superseded_by: Optional[str] = None, + result: Optional["_models.MemoryStoreUpdateCompletedResult"] = None, + error: Optional["_models.Error"] = None, ) -> None: ... @overload @@ -11814,26 +10600,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore -class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"): - """Emitted when the system is in the process of retrieving the list of available MCP tools. +class MicrosoftFabricPreviewTool(Tool, discriminator="fabric_dataagent_preview"): + """The input definition information for a Microsoft Fabric tool as used to configure an agent. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS + :ivar type: The object type, which is always 'fabric_dataagent_preview'. Required. + :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW + :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. + :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters """ - type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.""" + type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'fabric_dataagent_preview'. Required.""" + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The fabric data agent tool parameters. Required.""" @overload def __init__( self, *, - sequence_number: int, + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters", ) -> None: ... @overload @@ -11845,36 +10634,48 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore + self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore -class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"): - """Emitted when a new output item is added. +class ModelDeployment(Deployment, discriminator="ModelDeployment"): + """Model Deployment Definition. - :ivar sequence_number: The sequence number for this event. Required. 
- :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_item.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_ADDED - :ivar output_index: The index of the output item that was added. Required. - :vartype output_index: int - :ivar item: The output item that was added. Required. - :vartype item: ~azure.ai.projects.models.ItemResource + :ivar name: Name of the deployment. Required. + :vartype name: str + :ivar type: The type of the deployment. Required. Model deployment + :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT + :ivar model_name: Publisher-specific name of the deployed model. Required. + :vartype model_name: str + :ivar model_version: Publisher-specific version of the deployed model. Required. + :vartype model_version: str + :ivar model_publisher: Name of the deployed model's publisher. Required. + :vartype model_publisher: str + :ivar capabilities: Capabilities of deployed model. Required. + :vartype capabilities: dict[str, str] + :ivar sku: Sku of the model deployment. Required. + :vartype sku: ~azure.ai.projects.models.ModelDeploymentSku + :ivar connection_name: Name of the connection the deployment comes from. + :vartype connection_name: str """ - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_item.added``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that was added. Required.""" - item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output item that was added. 
Required.""" + type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the deployment. Required. Model deployment""" + model_name: str = rest_field(name="modelName", visibility=["read"]) + """Publisher-specific name of the deployed model. Required.""" + model_version: str = rest_field(name="modelVersion", visibility=["read"]) + """Publisher-specific version of the deployed model. Required.""" + model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) + """Name of the deployed model's publisher. Required.""" + capabilities: dict[str, str] = rest_field(visibility=["read"]) + """Capabilities of deployed model. Required.""" + sku: "_models.ModelDeploymentSku" = rest_field(visibility=["read"]) + """Sku of the model deployment. Required.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) + """Name of the connection the deployment comes from.""" @overload def __init__( self, - *, - sequence_number: int, - output_index: int, - item: "_models.ItemResource", ) -> None: ... @overload @@ -11886,36 +10687,44 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore + self.type = DeploymentType.MODEL_DEPLOYMENT # type: ignore -class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"): - """Emitted when an output item is marked done. +class ModelDeploymentSku(_Model): + """Sku information. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_item.done``. Required. 
- :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_DONE - :ivar output_index: The index of the output item that was marked done. Required. - :vartype output_index: int - :ivar item: The output item that was marked done. Required. - :vartype item: ~azure.ai.projects.models.ItemResource + :ivar capacity: Sku capacity. Required. + :vartype capacity: int + :ivar family: Sku family. Required. + :vartype family: str + :ivar name: Sku name. Required. + :vartype name: str + :ivar size: Sku size. Required. + :vartype size: str + :ivar tier: Sku tier. Required. + :vartype tier: str """ - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_item.done``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that was marked done. Required.""" - item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The output item that was marked done. Required.""" + capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku capacity. Required.""" + family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku family. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku name. Required.""" + size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku size. Required.""" + tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku tier. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item: "_models.ItemResource", + capacity: int, + family: str, + name: str, + size: str, + tier: str, ) -> None: ... 
@overload @@ -11927,39 +10736,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore - -class ResponsePromptVariables(_Model): - """Optional map of values to substitute in for variables in your - prompt. The substitution values can either be strings, or other - Response input types like images or files. - """ - - -class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"): - """Emitted when a response is queued and waiting to be processed. +class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): + """Monthly recurrence schedule. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.queued'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_QUEUED - :ivar response: The full response object that is queued. Required. - :vartype response: ~azure.ai.projects.models.Response + :ivar type: Monthly recurrence type. Required. Monthly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.MONTHLY + :ivar days_of_month: Days of the month for the recurrence schedule. Required. + :vartype days_of_month: list[int] """ - type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.queued'. Required.""" - response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The full response object that is queued. Required.""" + type: Literal[RecurrenceType.MONTHLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Monthly recurrence type. Required. 
Monthly recurrence pattern.""" + days_of_month: list[int] = rest_field( + name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the month for the recurrence schedule. Required.""" @overload def __init__( self, *, - sequence_number: int, - response: "_models.Response", + days_of_month: list[int], ) -> None: ... @overload @@ -11971,47 +10770,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore + self.type = RecurrenceType.MONTHLY # type: ignore -class ResponseReasoningDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning.delta"): - """Emitted when there is a delta (partial update) to the reasoning content. +class Move(ComputerAction, discriminator="move"): + """Move. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning.delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DELTA - :ivar item_id: The unique identifier of the item for which reasoning is being updated. - Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar content_index: The index of the reasoning content part within the output item. Required. - :vartype content_index: int - :ivar delta: The partial update to the reasoning content. Required. - :vartype delta: any + :ivar type: Specifies the event type. For a move action, this property is + always set to ``move``. Required. + :vartype type: str or ~azure.ai.projects.models.MOVE + :ivar x: The x-coordinate to move to. Required. + :vartype x: int + :ivar y: The y-coordinate to move to. Required. 
+ :vartype y: int """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning.delta'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which reasoning is being updated. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the reasoning content part within the output item. Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the reasoning content. Required.""" + type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a move action, this property is + always set to ``move``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate to move to. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate to move to. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: Any, + x: int, + y: int, ) -> None: ... 
@overload @@ -12023,46 +10810,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_DELTA # type: ignore + self.type = ComputerActionType.MOVE # type: ignore -class ResponseReasoningDoneEvent(ResponseStreamEvent, discriminator="response.reasoning.done"): - """Emitted when the reasoning content is finalized for an item. +class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): + """Credentials that do not require authentication. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning.done'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DONE - :ivar item_id: The unique identifier of the item for which reasoning is finalized. Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar content_index: The index of the reasoning content part within the output item. Required. - :vartype content_index: int - :ivar text: The finalized reasoning text. Required. - :vartype text: str + :ivar type: The credential type. Required. No credential + :vartype type: str or ~azure.ai.projects.models.NONE """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning.done'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which reasoning is finalized. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the reasoning content part within the output item. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized reasoning text. Required.""" + type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. No credential""" @overload def __init__( self, - *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - text: str, ) -> None: ... @overload @@ -12074,47 +10837,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_DONE # type: ignore + self.type = CredentialType.NONE # type: ignore -class ResponseReasoningSummaryDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.delta"): - """Emitted when there is a delta (partial update) to the reasoning summary content. +class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): + """Request from the service for the user to perform OAuth consent. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning_summary.delta'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DELTA - :ivar item_id: The unique identifier of the item for which the reasoning summary is being - updated. Required. - :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. 
- :vartype output_index: int - :ivar summary_index: The index of the summary part within the output item. Required. - :vartype summary_index: int - :ivar delta: The partial update to the reasoning summary content. Required. - :vartype delta: any + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str + :ivar id: Required. + :vartype id: str + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST + :ivar consent_link: The link the user can use to perform OAuth consent. Required. + :vartype consent_link: str + :ivar server_label: The server label for the OAuth consent request. Required. + :vartype server_label: str """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning_summary.delta'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which the reasoning summary is being updated. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the output item. Required.""" - delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The partial update to the reasoning summary content. 
Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + type: Literal[ItemResourceType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The link the user can use to perform OAuth consent. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The server label for the OAuth consent request. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - delta: Any, + id: str, # pylint: disable=redefined-builtin + consent_link: str, + server_label: str, + created_by: Optional[Union["_models.CreatedBy", str]] = None, ) -> None: ... @overload @@ -12126,47 +10884,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA # type: ignore + self.type = ItemResourceType.OAUTH_CONSENT_REQUEST # type: ignore -class ResponseReasoningSummaryDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.done"): - """Emitted when the reasoning summary content is finalized for an item. +class OneTimeTrigger(Trigger, discriminator="OneTime"): + """One-time trigger. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always 'response.reasoning_summary.done'. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DONE - :ivar item_id: The unique identifier of the item for which the reasoning summary is finalized. - Required. 
- :vartype item_id: str - :ivar output_index: The index of the output item in the response's output array. Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the output item. Required. - :vartype summary_index: int - :ivar text: The finalized reasoning summary text. Required. - :vartype text: str + :ivar type: Required. One-time trigger. + :vartype type: str or ~azure.ai.projects.models.ONE_TIME + :ivar trigger_at: Date and time for the one-time trigger in ISO 8601 format. Required. + :vartype trigger_at: str + :ivar time_zone: Time zone for the one-time trigger. + :vartype time_zone: str """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always 'response.reasoning_summary.done'. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The unique identifier of the item for which the reasoning summary is finalized. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item in the response's output array. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the output item. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The finalized reasoning summary text. Required.""" + type: Literal[TriggerType.ONE_TIME] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. One-time trigger.""" + trigger_at: str = rest_field(name="triggerAt", visibility=["read", "create", "update", "delete", "query"]) + """Date and time for the one-time trigger in ISO 8601 format. 
Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the one-time trigger.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - text: str, + trigger_at: str, + time_zone: Optional[str] = None, ) -> None: ... @overload @@ -12178,49 +10922,30 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE # type: ignore + self.type = TriggerType.ONE_TIME # type: ignore -class ResponseReasoningSummaryPartAddedEvent( - ResponseStreamEvent, discriminator="response.reasoning_summary_part.added" -): - """Emitted when a new reasoning summary part is added. +class OpenApiAuthDetails(_Model): + """authentication details for OpenApiFunctionDefinition. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_ADDED - :ivar item_id: The ID of the item this summary part is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary part is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar part: The summary part that was added. Required. - :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails + + :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity. + Required. Known values are: "anonymous", "project_connection", and "managed_identity". + :vartype type: str or ~azure.ai.projects.models.OpenApiAuthType """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_part.added``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary part is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary part is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. Required.""" - part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The summary part that was added. Required.""" + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of authentication, must be anonymous/project_connection/managed_identity. Required. + Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\".""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - part: "_models.ReasoningItemSummaryPart", + type: str, ) -> None: ... 
@overload @@ -12232,47 +10957,21 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore -class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"): - """Emitted when a reasoning summary part is completed. +class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): + """Security details for OpenApi anonymous authentication. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_DONE - :ivar item_id: The ID of the item this summary part is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary part is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar part: The completed summary part. Required. - :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + :ivar type: The object type, which is always 'anonymous'. Required. + :vartype type: str or ~azure.ai.projects.models.ANONYMOUS """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_part.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary part is associated with. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary part is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. Required.""" - part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The completed summary part. Required.""" + type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'anonymous'. Required.""" @overload def __init__( self, - *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - part: "_models.ReasoningItemSummaryPart", ) -> None: ... @overload @@ -12284,49 +10983,50 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore + self.type = OpenApiAuthType.ANONYMOUS # type: ignore -class ResponseReasoningSummaryTextDeltaEvent( - ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta" -): - """Emitted when a delta is added to a reasoning summary text. +class OpenApiFunctionDefinition(_Model): + """The input definition information for an openapi function. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA - :ivar item_id: The ID of the item this summary text delta is associated with. Required. 
- :vartype item_id: str - :ivar output_index: The index of the output item this summary text delta is associated with. - Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar delta: The text delta that was added to the summary. Required. - :vartype delta: str + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar spec: The openapi function shape, described as a JSON Schema object. Required. + :vartype spec: any + :ivar auth: Open API authentication details. Required. + :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] + :ivar functions: List of function definitions used by OpenApi tool. + :vartype functions: list[~azure.ai.projects.models.OpenApiFunctionDefinitionFunction] """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary text delta is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary text delta is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. 
Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text delta that was added to the summary. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The openapi function shape, described as a JSON Schema object. Required.""" + auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Open API authentication details. Required.""" + default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" + functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field(visibility=["read"]) + """List of function definitions used by OpenApi tool.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - delta: str, + name: str, + spec: Any, + auth: "_models.OpenApiAuthDetails", + description: Optional[str] = None, + default_params: Optional[list[str]] = None, ) -> None: ... @overload @@ -12338,47 +11038,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore -class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"): - """Emitted when a reasoning summary text is completed. 
+class OpenApiFunctionDefinitionFunction(_Model): + """OpenApiFunctionDefinitionFunction. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE - :ivar item_id: The ID of the item this summary text is associated with. Required. - :vartype item_id: str - :ivar output_index: The index of the output item this summary text is associated with. + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. Required. - :vartype output_index: int - :ivar summary_index: The index of the summary part within the reasoning summary. Required. - :vartype summary_index: int - :ivar text: The full text of the completed reasoning summary. Required. - :vartype text: str + :vartype parameters: any """ - type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.reasoning_summary_text.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the item this summary text is associated with. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item this summary text is associated with. Required.""" - summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the summary part within the reasoning summary. 
Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The full text of the completed reasoning summary. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The parameters the functions accepts, described as a JSON Schema object. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - summary_index: int, - text: str, + name: str, + parameters: Any, + description: Optional[str] = None, ) -> None: ... @overload @@ -12390,46 +11079,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore -class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"): - """Emitted when there is a partial refusal text. +class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): + """Security details for OpenApi managed_identity authentication. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.refusal.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DELTA - :ivar item_id: The ID of the output item that the refusal text is added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the refusal text is added to. Required. 
- :vartype output_index: int - :ivar content_index: The index of the content part that the refusal text is added to. Required. - :vartype content_index: int - :ivar delta: The refusal text that is added. Required. - :vartype delta: str + :ivar type: The object type, which is always 'managed_identity'. Required. + :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY + :ivar security_scheme: Connection auth security details. Required. + :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme """ - type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.refusal.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the refusal text is added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the refusal text is added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the refusal text is added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal text that is added. Required.""" + type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'managed_identity'. Required.""" + security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Connection auth security details. 
Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: str, + security_scheme: "_models.OpenApiManagedSecurityScheme", ) -> None: ... @overload @@ -12441,47 +11113,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore + self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore -class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"): - """Emitted when refusal text is finalized. +class OpenApiManagedSecurityScheme(_Model): + """Security scheme for OpenApi managed_identity authentication. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.refusal.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DONE - :ivar item_id: The ID of the output item that the refusal text is finalized. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the refusal text is finalized. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the refusal text is finalized. - Required. - :vartype content_index: int - :ivar refusal: The refusal text that is finalized. Required. - :vartype refusal: str + :ivar audience: Authentication scope for managed_identity auth type. Required. + :vartype audience: str """ - type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.refusal.done``. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the refusal text is finalized. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the refusal text is finalized. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the refusal text is finalized. Required.""" - refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The refusal text that is finalized. Required.""" + audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Authentication scope for managed_identity auth type. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - refusal: str, + audience: str, ) -> None: ... @overload @@ -12493,36 +11142,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore - -class ResponsesMessageItemParam(ItemParam, discriminator="message"): - """A response message item, representing a role and content, as provided as client request - parameters. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponsesAssistantMessageItemParam, ResponsesDeveloperMessageItemParam, - ResponsesSystemMessageItemParam, ResponsesUserMessageItemParam +class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"): + """Security details for OpenApi project connection authentication. - :ivar type: The type of the responses item, which is always 'message'. Required. 
- :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role associated with the message. Required. Known values are: "system", - "developer", "user", and "assistant". - :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + :ivar type: The object type, which is always 'project_connection'. Required. + :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION + :ivar security_scheme: Project connection auth security details. Required. + :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme """ - __mapping__: dict[str, _Model] = {} - type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the responses item, which is always 'message'. Required.""" - role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) - """The role associated with the message. Required. Known values are: \"system\", \"developer\", - \"user\", and \"assistant\".""" + type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'project_connection'. Required.""" + security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Project connection auth security details. Required.""" @overload def __init__( self, *, - role: str, + security_scheme: "_models.OpenApiProjectConnectionSecurityScheme", ) -> None: ... 
@overload @@ -12534,35 +11176,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MESSAGE # type: ignore + self.type = OpenApiAuthType.PROJECT_CONNECTION # type: ignore -class ResponsesAssistantMessageItemParam(ResponsesMessageItemParam, discriminator="assistant"): - """A message parameter item with the ``assistant`` role. +class OpenApiProjectConnectionSecurityScheme(_Model): + """Security scheme for OpenApi managed_identity authentication. - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``assistant``. Required. - :vartype role: str or ~azure.ai.projects.models.ASSISTANT - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] + :ivar project_connection_id: Project connection id for Project Connection auth type. Required. + :vartype project_connection_id: str """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``assistant``. Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for Project Connection auth type. Required.""" @overload def __init__( self, *, - content: Union[str, list["_models.ItemContent"]], + project_connection_id: str, ) -> None: ... 
@overload @@ -12574,53 +11205,29 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.ASSISTANT # type: ignore -class ResponsesMessageItemResource(ItemResource, discriminator="message"): - """A response message resource item, representing a role and content, as provided on service - responses. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponsesAssistantMessageItemResource, ResponsesDeveloperMessageItemResource, - ResponsesSystemMessageItemResource, ResponsesUserMessageItemResource +class OpenApiTool(Tool, discriminator="openapi"): + """The input definition information for an OpenAPI tool as used to configure an agent. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role associated with the message. Required. Known values are: "system", - "developer", "user", and "assistant". - :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + :ivar type: The object type, which is always 'openapi'. Required. + :vartype type: str or ~azure.ai.projects.models.OPENAPI + :ivar openapi: The openapi function definition. Required. 
+ :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition """ - __mapping__: dict[str, _Model] = {} - type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the responses item, which is always 'message'. Required.""" - status: Literal["in_progress", "completed", "incomplete"] = rest_field( + type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required.""" + openapi: "_models.OpenApiFunctionDefinition" = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" - role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) - """The role associated with the message. Required. Known values are: \"system\", \"developer\", - \"user\", and \"assistant\".""" + """The openapi function definition. Required.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - role: str, - created_by: Optional["_models.CreatedBy"] = None, + openapi: "_models.OpenApiFunctionDefinition", ) -> None: ... @overload @@ -12632,42 +11239,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.MESSAGE # type: ignore + self.type = ToolType.OPENAPI # type: ignore -class ResponsesAssistantMessageItemResource(ResponsesMessageItemResource, discriminator="assistant"): - """A message resource item with the ``assistant`` role. 
+class OutputContent(_Model): + """OutputContent. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``assistant``. Required. - :vartype role: str or ~azure.ai.projects.models.ASSISTANT - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + OutputContentOutputTextContent, ReasoningTextContent, OutputContentRefusalContent + + :ivar type: Required. Known values are: "output_text", "refusal", and "reasoning_text". + :vartype type: str or ~azure.ai.projects.models.OutputContentType """ __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``assistant``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"output_text\", \"refusal\", and \"reasoning_text\".""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, + type: str, ) -> None: ... @overload @@ -12679,35 +11272,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.ASSISTANT # type: ignore -class ResponsesDeveloperMessageItemParam(ResponsesMessageItemParam, discriminator="developer"): - """A message parameter item with the ``developer`` role. +class OutputMessageContent(_Model): + """OutputMessageContent. - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``developer``. Required. - :vartype role: str or ~azure.ai.projects.models.DEVELOPER - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + OutputMessageContentOutputTextContent, OutputMessageContentRefusalContent + + :ivar type: Required. Known values are: "output_text" and "refusal". + :vartype type: str or ~azure.ai.projects.models.OutputMessageContentType """ __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``developer``. 
Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"output_text\" and \"refusal\".""" @overload def __init__( self, *, - content: Union[str, list["_models.ItemContent"]], + type: str, ) -> None: ... @overload @@ -12719,42 +11304,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.DEVELOPER # type: ignore -class ResponsesDeveloperMessageItemResource(ResponsesMessageItemResource, discriminator="developer"): - """A message resource item with the ``developer`` role. +class OutputMessageContentOutputTextContent(OutputMessageContent, discriminator="output_text"): + """Output text. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``developer``. Required. - :vartype role: str or ~azure.ai.projects.models.DEVELOPER - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] + :ivar type: The type of the output text. Always ``output_text``. 
Required. + :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT + :ivar text: The text output from the model. Required. + :vartype text: str + :ivar annotations: The annotations of the text output. Required. + :vartype annotations: list[~azure.ai.projects.models.Annotation] + :ivar logprobs: + :vartype logprobs: list[~azure.ai.projects.models.LogProb] """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``developer``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" + type: Literal[OutputMessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output text. Always ``output_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text output from the model. Required.""" + annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The annotations of the text output. Required.""" + logprobs: Optional[list["_models.LogProb"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, + text: str, + annotations: list["_models.Annotation"], + logprobs: Optional[list["_models.LogProb"]] = None, ) -> None: ... 
@overload @@ -12766,35 +11345,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.DEVELOPER # type: ignore + self.type = OutputMessageContentType.OUTPUT_TEXT # type: ignore -class ResponsesSystemMessageItemParam(ResponsesMessageItemParam, discriminator="system"): - """A message parameter item with the ``system`` role. +class OutputMessageContentRefusalContent(OutputMessageContent, discriminator="refusal"): + """Refusal. - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``system``. Required. - :vartype role: str or ~azure.ai.projects.models.SYSTEM - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] + :ivar type: The type of the refusal. Always ``refusal``. Required. + :vartype type: str or ~azure.ai.projects.models.REFUSAL + :ivar refusal: The refusal explanation from the model. Required. + :vartype refusal: str """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``system``. Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" + type: Literal[OutputMessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the refusal. Always ``refusal``. 
Required.""" + refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal explanation from the model. Required.""" @overload def __init__( self, *, - content: Union[str, list["_models.ItemContent"]], + refusal: str, ) -> None: ... @overload @@ -12806,42 +11378,42 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.SYSTEM # type: ignore + self.type = OutputMessageContentType.REFUSAL # type: ignore -class ResponsesSystemMessageItemResource(ResponsesMessageItemResource, discriminator="system"): - """A message resource item with the ``system`` role. +class PendingUploadRequest(_Model): + """Represents a request for a pending upload. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``system``. Required. - :vartype role: str or ~azure.ai.projects.models.SYSTEM - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] + :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. + :vartype pending_upload_id: str + :ivar connection_name: Azure Storage Account connection name to use for generating temporary + SAS token. 
+ :vartype connection_name: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. + :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``system``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" + pending_upload_id: Optional[str] = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """If PendingUploadId is not provided, a random GUID will be used.""" + connection_name: Optional[str] = rest_field( + name="connectionName", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Storage Account connection name to use for generating temporary SAS token.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + pending_upload_id: Optional[str] = None, + connection_name: Optional[str] = None, ) -> None: ... 
@overload @@ -12853,35 +11425,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.SYSTEM # type: ignore -class ResponsesUserMessageItemParam(ResponsesMessageItemParam, discriminator="user"): - """A message parameter item with the ``user`` role. +class PendingUploadResponse(_Model): + """Represents the response for a pending upload request. - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar role: The role of the message, which is always ``user``. Required. - :vartype role: str or ~azure.ai.projects.models.USER - :ivar content: The content associated with the message. Required. Is either a str type or a - [ItemContent] type. - :vartype content: str or list[~azure.ai.projects.models.ItemContent] + :ivar blob_reference: Container-level read, write, list SAS. Required. + :vartype blob_reference: ~azure.ai.projects.models.BlobReference + :ivar pending_upload_id: ID for this upload request. Required. + :vartype pending_upload_id: str + :ivar version: Version of asset to be created if user did not specify version when initially + creating upload. + :vartype version: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. + :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``user``. 
Required.""" - content: Union["str", list["_models.ItemContent"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Container-level read, write, list SAS. Required.""" + pending_upload_id: str = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID for this upload request. Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Version of asset to be created if user did not specify version when initially creating upload.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] ) - """The content associated with the message. Required. Is either a str type or a [ItemContent] - type.""" + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" @overload def __init__( self, *, - content: Union[str, list["_models.ItemContent"]], + blob_reference: "_models.BlobReference", + pending_upload_id: str, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + version: Optional[str] = None, ) -> None: ... @overload @@ -12893,42 +11476,89 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.USER # type: ignore -class ResponsesUserMessageItemResource(ResponsesMessageItemResource, discriminator="user"): - """A message resource item with the ``user`` role. +class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): + """The prompt agent definition. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. 
- :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: The type of the responses item, which is always 'message'. Required. - :vartype type: str or ~azure.ai.projects.models.MESSAGE - :ivar status: The status of the item. One of ``in_progress``, ``completed``, or - ``incomplete``. Populated when items are returned via API. Required. Is one of the following - types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] - :vartype status: str or str or str - :ivar role: The role of the message, which is always ``user``. Required. - :vartype role: str or ~azure.ai.projects.models.USER - :ivar content: The content associated with the message. Required. - :vartype content: list[~azure.ai.projects.models.ItemContent] + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.PROMPT + :ivar model: The model deployment to use for this agent. Required. + :vartype model: str + :ivar instructions: A system (or developer) message inserted into the model's context. + :vartype instructions: str + :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 + will make the output more random, while lower values like 0.2 will make it more focused and + deterministic. + We generally recommend altering this or ``top_p`` but not both. + :vartype temperature: float + :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + + We generally recommend altering this or ``temperature`` but not both. 
+ :vartype top_p: float + :ivar reasoning: + :vartype reasoning: ~azure.ai.projects.models.Reasoning + :ivar tools: An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar text: Configuration options for a text response from the model. Can be plain text or + structured JSON data. + :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText + :ivar structured_inputs: Set of structured inputs that can participate in prompt template + substitution or tool argument bindings. + :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] """ - __mapping__: dict[str, _Model] = {} - role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The role of the message, which is always ``user``. Required.""" - content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The content associated with the message. Required.""" + kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The model deployment to use for this agent. Required.""" + instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A system (or developer) message inserted into the model's context.""" + temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output + more random, while lower values like 0.2 will make it more focused and deterministic. 
+ We generally recommend altering this or ``top_p`` but not both.""" + top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + + We generally recommend altering this or ``temperature`` but not both.""" + reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + text: Optional["_models.PromptAgentDefinitionText"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration options for a text response from the model. 
Can be plain text or structured JSON + data.""" + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Set of structured inputs that can participate in prompt template substitution or tool argument + bindings.""" @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "completed", "incomplete"], - content: list["_models.ItemContent"], - created_by: Optional["_models.CreatedBy"] = None, + model: str, + rai_config: Optional["_models.RaiConfig"] = None, + instructions: Optional[str] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + reasoning: Optional["_models.Reasoning"] = None, + tools: Optional[list["_models.Tool"]] = None, + text: Optional["_models.PromptAgentDefinitionText"] = None, + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, ) -> None: ... @overload @@ -12940,17 +11570,17 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.role = ResponsesMessageRole.USER # type: ignore + self.kind = AgentKind.PROMPT # type: ignore -class ResponseText(_Model): - """ResponseText. +class PromptAgentDefinitionText(_Model): + """PromptAgentDefinitionText. 
:ivar format: - :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + :vartype format: ~azure.ai.projects.models.TextResponseFormatConfiguration """ - format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( + format: Optional["_models.TextResponseFormatConfiguration"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -12958,7 +11588,7 @@ class ResponseText(_Model): def __init__( self, *, - format: Optional["_models.ResponseTextFormatConfiguration"] = None, + format: Optional["_models.TextResponseFormatConfiguration"] = None, ) -> None: ... @overload @@ -12972,43 +11602,36 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"): - """Emitted when there is an additional text delta. +class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt"): + """Prompt-based evaluator. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_text.delta``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DELTA - :ivar item_id: The ID of the output item that the text delta was added to. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the text delta was added to. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the text delta was added to. Required. - :vartype content_index: int - :ivar delta: The text delta that was added. Required. - :vartype delta: str + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. 
This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + :ivar type: Required. Prompt-based definition + :vartype type: str or ~azure.ai.projects.models.PROMPT + :ivar prompt_text: The prompt text used for evaluation. Required. + :vartype prompt_text: str """ - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_text.delta``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the text delta was added to. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the text delta was added to. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the text delta was added to. Required.""" - delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text delta that was added. Required.""" + type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Prompt-based definition""" + prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The prompt text used for evaluation. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - delta: str, + prompt_text: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, ) -> None: ... 
@overload @@ -13020,47 +11643,32 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore + self.type = EvaluatorDefinitionType.PROMPT # type: ignore -class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"): - """Emitted when text content is finalized. +class ProtocolVersionRecord(_Model): + """A record mapping for a single protocol and its version. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.output_text.done``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DONE - :ivar item_id: The ID of the output item that the text content is finalized. Required. - :vartype item_id: str - :ivar output_index: The index of the output item that the text content is finalized. Required. - :vartype output_index: int - :ivar content_index: The index of the content part that the text content is finalized. - Required. - :vartype content_index: int - :ivar text: The text content that is finalized. Required. - :vartype text: str + :ivar protocol: The protocol type. Required. Known values are: "activity_protocol" and + "responses". + :vartype protocol: str or ~azure.ai.projects.models.AgentProtocol + :ivar version: The version string for the protocol, e.g. 'v0.1.1'. Required. + :vartype version: str """ - type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.output_text.done``. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The ID of the output item that the text content is finalized. 
Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the text content is finalized. Required.""" - content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the content part that the text content is finalized. Required.""" - text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The text content that is finalized. Required.""" + protocol: Union[str, "_models.AgentProtocol"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocol type. Required. Known values are: \"activity_protocol\" and \"responses\".""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version string for the protocol, e.g. 'v0.1.1'. Required.""" @overload def __init__( self, *, - sequence_number: int, - item_id: str, - output_index: int, - content_index: int, - text: str, + protocol: Union[str, "_models.AgentProtocol"], + version: str, ) -> None: ... @overload @@ -13072,29 +11680,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore - -class ResponseTextFormatConfiguration(_Model): - """ResponseTextFormatConfiguration. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ResponseTextFormatConfigurationJsonObject, ResponseTextFormatConfigurationJsonSchema, - ResponseTextFormatConfigurationText +class RaiConfig(_Model): + """Configuration for Responsible AI (RAI) content filtering and safety features. - :ivar type: Required. Known values are: "text", "json_schema", and "json_object". 
- :vartype type: str or ~azure.ai.projects.models.ResponseTextFormatConfigurationType + :ivar rai_policy_name: The name of the RAI policy to apply. Required. + :vartype rai_policy_name: str """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"text\", \"json_schema\", and \"json_object\".""" + rai_policy_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the RAI policy to apply. Required.""" @overload def __init__( self, *, - type: str, + rai_policy_name: str, ) -> None: ... @overload @@ -13108,21 +11710,41 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ResponseTextFormatConfigurationJsonObject( - ResponseTextFormatConfiguration, discriminator="json_object" -): # pylint: disable=name-too-long - """ResponseTextFormatConfigurationJsonObject. +class RankingOptions(_Model): + """RankingOptions. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT + :ivar ranker: The ranker to use for the file search. Known values are: "auto" and + "default-2024-11-15". + :vartype ranker: str or ~azure.ai.projects.models.RankerVersionType + :ivar score_threshold: The score threshold for the file search, a number between 0 and 1. + Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer + results. + :vartype score_threshold: float + :ivar hybrid_search: Weights that control how reciprocal rank fusion balances semantic + embedding matches versus sparse keyword matches when hybrid search is enabled. 
+ :vartype hybrid_search: ~azure.ai.projects.models.HybridSearchOptions """ - type: Literal[ResponseTextFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + ranker: Optional[Union[str, "_models.RankerVersionType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The ranker to use for the file search. Known values are: \"auto\" and \"default-2024-11-15\".""" + score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will + attempt to return only the most relevant results, but may return fewer results.""" + hybrid_search: Optional["_models.HybridSearchOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Weights that control how reciprocal rank fusion balances semantic embedding matches versus + sparse keyword matches when hybrid search is enabled.""" @overload def __init__( self, + *, + ranker: Optional[Union[str, "_models.RankerVersionType"]] = None, + score_threshold: Optional[float] = None, + hybrid_search: Optional["_models.HybridSearchOptions"] = None, ) -> None: ... @overload @@ -13134,59 +11756,43 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.JSON_OBJECT # type: ignore -class ResponseTextFormatConfigurationJsonSchema( - ResponseTextFormatConfiguration, discriminator="json_schema" -): # pylint: disable=name-too-long - """JSON Schema response format. Used to generate structured JSON responses. - Learn more about `Structured Outputs - `_. +class Reasoning(_Model): + """Reasoning. - :ivar type: The type of response format being defined. Always ``json_schema``. Required. 
- :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA - :ivar description: A description of what the response format is for, used by the model to - determine how to respond in the format. - :vartype description: str - :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. Required. - :vartype name: str - :ivar schema: Required. - :vartype schema: dict[str, any] - :ivar strict: Whether to enable strict schema adherence when generating the output. - If set to true, the model will always follow the exact schema defined - in the ``schema`` field. Only a subset of JSON Schema is supported when - ``strict`` is ``true``. To learn more, read the `Structured Outputs - guide `_. - :vartype strict: bool + :ivar effort: Is one of the following types: Literal["none"], Literal["minimal"], + Literal["low"], Literal["medium"], Literal["high"], Literal["xhigh"] + :vartype effort: str or str or str or str or str or str + :ivar summary: Is one of the following types: Literal["auto"], Literal["concise"], + Literal["detailed"] + :vartype summary: str or str or str + :ivar generate_summary: Is one of the following types: Literal["auto"], Literal["concise"], + Literal["detailed"] + :vartype generate_summary: str or str or str """ - type: Literal[ResponseTextFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of response format being defined. Always ``json_schema``. Required.""" - description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A description of what the response format is for, used by the model to - determine how to respond in the format.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the response format. 
Must be a-z, A-Z, 0-9, or contain - underscores and dashes, with a maximum length of 64. Required.""" - schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Required.""" - strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Whether to enable strict schema adherence when generating the output. - If set to true, the model will always follow the exact schema defined - in the ``schema`` field. Only a subset of JSON Schema is supported when - ``strict`` is ``true``. To learn more, read the `Structured Outputs - guide `_.""" + effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is one of the following types: Literal[\"none\"], Literal[\"minimal\"], Literal[\"low\"], + Literal[\"medium\"], Literal[\"high\"], Literal[\"xhigh\"]""" + summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is one of the following types: Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Is one of the following types: Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" @overload def __init__( self, *, - name: str, - schema: dict[str, Any], - description: Optional[str] = None, - strict: Optional[bool] = None, + effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = None, + summary: Optional[Literal["auto", "concise", "detailed"]] = None, + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None, ) -> None: ... 
@overload @@ -13198,22 +11804,27 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.JSON_SCHEMA # type: ignore -class ResponseTextFormatConfigurationText(ResponseTextFormatConfiguration, discriminator="text"): - """ResponseTextFormatConfigurationText. +class ReasoningTextContent(OutputContent, discriminator="reasoning_text"): + """ReasoningTextContent. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.TEXT + :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required. + :vartype type: str or ~azure.ai.projects.models.REASONING_TEXT + :ivar text: The reasoning text from the model. Required. + :vartype text: str """ - type: Literal[ResponseTextFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + type: Literal[OutputContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the reasoning text. Always ``reasoning_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The reasoning text from the model. Required.""" @overload def __init__( self, + *, + text: str, ) -> None: ... @overload @@ -13225,51 +11836,48 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseTextFormatConfigurationType.TEXT # type: ignore + self.type = OutputContentType.REASONING_TEXT # type: ignore -class ResponseUsage(_Model): - """Represents token usage details including input tokens, output tokens, - a breakdown of output tokens, and the total tokens used. 
+class RecurrenceTrigger(Trigger, discriminator="Recurrence"): + """Recurrence based trigger. - :ivar input_tokens: The number of input tokens. Required. - :vartype input_tokens: int - :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. - :vartype input_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails - :ivar output_tokens: The number of output tokens. Required. - :vartype output_tokens: int - :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. - :vartype output_tokens_details: - ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails - :ivar total_tokens: The total number of tokens used. Required. - :vartype total_tokens: int + :ivar type: Type of the trigger. Required. Recurrence based trigger. + :vartype type: str or ~azure.ai.projects.models.RECURRENCE + :ivar start_time: Start time for the recurrence schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the recurrence schedule in ISO 8601 format. + :vartype end_time: str + :ivar time_zone: Time zone for the recurrence schedule. + :vartype time_zone: str + :ivar interval: Interval for the recurrence schedule. Required. + :vartype interval: int + :ivar schedule: Recurrence schedule for the recurrence trigger. Required. + :vartype schedule: ~azure.ai.projects.models.RecurrenceSchedule """ - input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of input tokens. Required.""" - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the input tokens. Required.""" - output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The number of output tokens. 
Required.""" - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """A detailed breakdown of the output tokens. Required.""" - total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The total number of tokens used. Required.""" + type: Literal[TriggerType.RECURRENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the trigger. Required. Recurrence based trigger.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the recurrence schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the recurrence schedule in ISO 8601 format.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the recurrence schedule.""" + interval: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Interval for the recurrence schedule. Required.""" + schedule: "_models.RecurrenceSchedule" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Recurrence schedule for the recurrence trigger. Required.""" @overload def __init__( self, *, - input_tokens: int, - input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", - output_tokens: int, - output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", - total_tokens: int, + interval: int, + schedule: "_models.RecurrenceSchedule", + start_time: Optional[str] = None, + end_time: Optional[str] = None, + time_zone: Optional[str] = None, ) -> None: ... 
@overload @@ -13281,36 +11889,87 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = TriggerType.RECURRENCE # type: ignore -class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"): - """Note: web_search is not yet available via Azure OpenAI. +class RedTeam(_Model): + """Red team details. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.completed``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str + :ivar name: Identifier of the red team run. Required. + :vartype name: str + :ivar display_name: Name of the red-team run. + :vartype display_name: str + :ivar num_turns: Number of simulation rounds. + :vartype num_turns: int + :ivar attack_strategies: List of attack strategies or nested lists of attack strategies. + :vartype attack_strategies: list[str or ~azure.ai.projects.models.AttackStrategy] + :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the + scan outputs conversation not evaluation result. + :vartype simulation_only: bool + :ivar risk_categories: List of risk categories to generate attack objectives for. + :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] + :ivar application_scenario: Application scenario for the red team operation, to generate + scenario specific attacks. + :vartype application_scenario: str + :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. 
+ :vartype tags: dict[str, str] + :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar status: Status of the red-team. It is set by service and is read-only. + :vartype status: str + :ivar target: Target configuration for the red-team run. Required. + :vartype target: ~azure.ai.projects.models.TargetConfig """ - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.completed``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. Required.""" + name: str = rest_field(name="id", visibility=["read"]) + """Identifier of the red team run. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the red-team run.""" + num_turns: Optional[int] = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) + """Number of simulation rounds.""" + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = rest_field( + name="attackStrategies", visibility=["read", "create", "update", "delete", "query"] + ) + """List of attack strategies or nested lists of attack strategies.""" + simulation_only: Optional[bool] = rest_field( + name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Simulation-only or Simulation + Evaluation. 
Default false, if true the scan outputs + conversation not evaluation result.""" + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to generate attack objectives for.""" + application_scenario: Optional[str] = rest_field( + name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] + ) + """Application scenario for the red team operation, to generate scenario specific attacks.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be + removed.""" + status: Optional[str] = rest_field(visibility=["read"]) + """Status of the red-team. It is set by service and is read-only.""" + target: "_models.TargetConfig" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target configuration for the red-team run. Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + target: "_models.TargetConfig", + display_name: Optional[str] = None, + num_turns: Optional[int] = None, + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = None, + simulation_only: Optional[bool] = None, + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = None, + application_scenario: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + properties: Optional[dict[str, str]] = None, ) -> None: ... 
@overload @@ -13322,37 +11981,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED # type: ignore -class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"): - """Note: web_search is not yet available via Azure OpenAI. +class ResponseUsageInputTokensDetails(_Model): + """ResponseUsageInputTokensDetails. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.in_progress``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str + :ivar cached_tokens: Required. + :vartype cached_tokens: int """ - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.in_progress``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. 
Required.""" + cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + cached_tokens: int, ) -> None: ... @overload @@ -13364,37 +12009,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS # type: ignore -class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"): - """Note: web_search is not yet available via Azure OpenAI. +class ResponseUsageOutputTokensDetails(_Model): + """ResponseUsageOutputTokensDetails. - :ivar sequence_number: The sequence number for this event. Required. - :vartype sequence_number: int - :ivar type: The type of the event. Always ``response.web_search_call.searching``. Required. - :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING - :ivar output_index: The index of the output item that the web search call is associated with. - Required. - :vartype output_index: int - :ivar item_id: Unique ID for the output item associated with the web search call. Required. - :vartype item_id: str + :ivar reasoning_tokens: Required. + :vartype reasoning_tokens: int """ - type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The type of the event. Always ``response.web_search_call.searching``. Required.""" - output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The index of the output item that the web search call is associated with. 
Required.""" - item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Unique ID for the output item associated with the web search call. Required.""" + reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" @overload def __init__( self, *, - sequence_number: int, - output_index: int, - item_id: str, + reasoning_tokens: int, ) -> None: ... @overload @@ -13406,7 +12037,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING # type: ignore class SASCredentials(BaseCredentials, discriminator="SAS"): @@ -13569,28 +12199,71 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SharepointAgentTool(Tool, discriminator="sharepoint_grounding_preview"): - """The input definition information for a sharepoint tool as used to configure an agent. +class Screenshot(ComputerAction, discriminator="screenshot"): + """Screenshot. - :ivar type: The object type, which is always 'sharepoint_grounding'. Required. - :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW - :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. - :vartype sharepoint_grounding_preview: - ~azure.ai.projects.models.SharepointGroundingToolParameters + :ivar type: Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. Required. + :vartype type: str or ~azure.ai.projects.models.SCREENSHOT """ - type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'sharepoint_grounding'. 
Required.""" - sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The sharepoint grounding tool parameters. Required.""" + type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.SCREENSHOT # type: ignore + + +class Scroll(ComputerAction, discriminator="scroll"): + """Scroll. + + :ivar type: Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required. + :vartype type: str or ~azure.ai.projects.models.SCROLL + :ivar x: The x-coordinate where the scroll occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the scroll occurred. Required. + :vartype y: int + :ivar scroll_x: The horizontal scroll distance. Required. + :vartype scroll_x: int + :ivar scroll_y: The vertical scroll distance. Required. + :vartype scroll_y: int + """ + + type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the scroll occurred. 
Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the scroll occurred. Required.""" + scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The horizontal scroll distance. Required.""" + scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The vertical scroll distance. Required.""" @overload def __init__( self, *, - sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters", + x: int, + y: int, + scroll_x: int, + scroll_y: int, ) -> None: ... @overload @@ -13602,7 +12275,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore + self.type = ComputerActionType.SCROLL # type: ignore class SharepointGroundingToolParameters(_Model): @@ -13638,6 +12311,42 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class SharepointPreviewTool(Tool, discriminator="sharepoint_grounding_preview"): + """The input definition information for a sharepoint tool as used to configure an agent. + + :ivar type: The object type, which is always 'sharepoint_grounding_preview'. Required. + :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW + :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. + :vartype sharepoint_grounding_preview: + ~azure.ai.projects.models.SharepointGroundingToolParameters + """ + + type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'sharepoint_grounding_preview'. 
Required.""" + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The sharepoint grounding tool parameters. Required.""" + + @overload + def __init__( + self, + *, + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore + + class StructuredInputDefinition(_Model): """An structured input that can participate in prompt template substitutions and tool argument binding. @@ -13730,17 +12439,16 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class StructuredOutputsItemResource(ItemResource, discriminator="structured_outputs"): """StructuredOutputsItemResource. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str :ivar type: Required. :vartype type: str or ~azure.ai.projects.models.STRUCTURED_OUTPUTS :ivar output: The structured output captured during the response. Required. 
:vartype output: any """ - type: Literal[ItemType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + type: Literal[ItemResourceType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """Required.""" output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The structured output captured during the response. Required.""" @@ -13749,9 +12457,42 @@ class StructuredOutputsItemResource(ItemResource, discriminator="structured_outp def __init__( self, *, - id: str, # pylint: disable=redefined-builtin output: Any, - created_by: Optional["_models.CreatedBy"] = None, + created_by: Optional[Union["_models.CreatedBy", str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemResourceType.STRUCTURED_OUTPUTS # type: ignore + + +class Summary(_Model): + """Summary text. + + :ivar type: The type of the object. Always ``summary_text``. Required. Default value is + "summary_text". + :vartype type: str + :ivar text: A summary of the reasoning output from the model so far. Required. + :vartype text: str + """ + + type: Literal["summary_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the object. Always ``summary_text``. Required. Default value is \"summary_text\".""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A summary of the reasoning output from the model so far. Required.""" + + @overload + def __init__( + self, + *, + text: str, ) -> None: ... 
@overload @@ -13763,7 +12504,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.STRUCTURED_OUTPUTS # type: ignore + self.type: Literal["summary_text"] = "summary_text" class TaxonomyCategory(_Model): @@ -13877,142 +12618,34 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ToolChoiceObject(_Model): - """ToolChoiceObject. +class TextResponseFormatConfiguration(_Model): + """An object specifying the format that the model must output. + Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, + which ensures the model will match your supplied JSON schema. Learn more in the + `Structured Outputs guide `_. + The default format is ``{ "type": "text" }`` with no additional options. + *Not recommended for gpt-4o and newer models:** + Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which + ensures the message the model generates is valid JSON. Using ``json_schema`` + is preferred for models that support it. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - ToolChoiceObjectCodeInterpreter, ToolChoiceObjectComputer, ToolChoiceObjectFileSearch, - ToolChoiceObjectFunction, ToolChoiceObjectImageGen, ToolChoiceObjectMCP, - ToolChoiceObjectWebSearch + TextResponseFormatConfigurationResponseFormatJsonObject, TextResponseFormatJsonSchema, + TextResponseFormatConfigurationResponseFormatText - :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", - "web_search_preview", "image_generation", "code_interpreter", and "mcp". - :vartype type: str or ~azure.ai.projects.models.ToolChoiceObjectType + :ivar type: Required. Known values are: "text", "json_schema", and "json_object". 
+ :vartype type: str or ~azure.ai.projects.models.TextResponseFormatConfigurationType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", - \"web_search_preview\", \"image_generation\", \"code_interpreter\", and \"mcp\".""" - - @overload - def __init__( - self, - *, - type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ToolChoiceObjectCodeInterpreter(ToolChoiceObject, discriminator="code_interpreter"): - """ToolChoiceObjectCodeInterpreter. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER - """ - - type: Literal[ToolChoiceObjectType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.CODE_INTERPRETER # type: ignore - - -class ToolChoiceObjectComputer(ToolChoiceObject, discriminator="computer_use_preview"): - """ToolChoiceObjectComputer. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.COMPUTER - """ - - type: Literal[ToolChoiceObjectType.COMPUTER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.COMPUTER # type: ignore - - -class ToolChoiceObjectFileSearch(ToolChoiceObject, discriminator="file_search"): - """ToolChoiceObjectFileSearch. - - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH - """ - - type: Literal[ToolChoiceObjectType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - - @overload - def __init__( - self, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.FILE_SEARCH # type: ignore - - -class ToolChoiceObjectFunction(ToolChoiceObject, discriminator="function"): - """Use this option to force the model to call a specific function. - - :ivar type: For function calling, the type is always ``function``. Required. - :vartype type: str or ~azure.ai.projects.models.FUNCTION - :ivar name: The name of the function to call. Required. - :vartype name: str - """ - - type: Literal[ToolChoiceObjectType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """For function calling, the type is always ``function``. Required.""" - name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the function to call. Required.""" + """Required. 
Known values are: \"text\", \"json_schema\", and \"json_object\".""" @overload def __init__( self, *, - name: str, + type: str, ) -> None: ... @overload @@ -14024,18 +12657,19 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.FUNCTION # type: ignore -class ToolChoiceObjectImageGen(ToolChoiceObject, discriminator="image_generation"): - """ToolChoiceObjectImageGen. +class TextResponseFormatConfigurationResponseFormatJsonObject( + TextResponseFormatConfiguration, discriminator="json_object" +): # pylint: disable=name-too-long + """JSON object. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + :ivar type: The type of response format being defined. Always ``json_object``. Required. + :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT """ - type: Literal[ToolChoiceObjectType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" + type: Literal[TextResponseFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of response format being defined. Always ``json_object``. Required.""" @overload def __init__( @@ -14051,33 +12685,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.IMAGE_GENERATION # type: ignore + self.type = TextResponseFormatConfigurationType.JSON_OBJECT # type: ignore -class ToolChoiceObjectMCP(ToolChoiceObject, discriminator="mcp"): - """Use this option to force the model to call a specific tool on a remote MCP server. 
+class TextResponseFormatConfigurationResponseFormatText( + TextResponseFormatConfiguration, discriminator="text" +): # pylint: disable=name-too-long + """Text. - :ivar type: For MCP tools, the type is always ``mcp``. Required. - :vartype type: str or ~azure.ai.projects.models.MCP - :ivar server_label: The label of the MCP server to use. Required. - :vartype server_label: str - :ivar name: The name of the tool to call on the server. - :vartype name: str + :ivar type: The type of response format being defined. Always ``text``. Required. + :vartype type: str or ~azure.ai.projects.models.TEXT """ - type: Literal[ToolChoiceObjectType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """For MCP tools, the type is always ``mcp``. Required.""" - server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The label of the MCP server to use. Required.""" - name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the tool to call on the server.""" + type: Literal[TextResponseFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of response format being defined. Always ``text``. Required.""" @overload def __init__( self, - *, - server_label: str, - name: Optional[str] = None, ) -> None: ... @overload @@ -14089,22 +12714,46 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.MCP # type: ignore + self.type = TextResponseFormatConfigurationType.TEXT # type: ignore -class ToolChoiceObjectWebSearch(ToolChoiceObject, discriminator="web_search_preview"): - """Note: web_search is not yet available via Azure OpenAI. 
+class TextResponseFormatJsonSchema(TextResponseFormatConfiguration, discriminator="json_schema"): + """JSON schema. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH + :ivar type: The type of response format being defined. Always ``json_schema``. Required. + :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA + :ivar description: A description of what the response format is for, used by the model to + determine how to respond in the format. + :vartype description: str + :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain + underscores and dashes, with a maximum length of 64. Required. + :vartype name: str + :ivar schema: Required. + :vartype schema: dict[str, any] + :ivar strict: + :vartype strict: bool """ - type: Literal[ToolChoiceObjectType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + type: Literal[TextResponseFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of response format being defined. Always ``json_schema``. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the response format is for, used by the model to + determine how to respond in the format.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the response format. Must be a-z, A-Z, 0-9, or contain + underscores and dashes, with a maximum length of 64. 
Required.""" + schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Required.""" + strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, + *, + name: str, + schema: dict[str, Any], + description: Optional[str] = None, + strict: Optional[bool] = None, ) -> None: ... @overload @@ -14116,7 +12765,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ToolChoiceObjectType.WEB_SEARCH # type: ignore + self.type = TextResponseFormatConfigurationType.JSON_SCHEMA # type: ignore class ToolDescription(_Model): @@ -14182,7 +12831,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class TopLogProb(_Model): - """The top log probability of a token. + """Top log probability. :ivar token: Required. :vartype token: str @@ -14219,6 +12868,90 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class Type(ComputerAction, discriminator="type"): + """Type. + + :ivar type: Specifies the event type. For a type action, this property is + always set to ``type``. Required. + :vartype type: str or ~azure.ai.projects.models.TYPE + :ivar text: The text to type. Required. + :vartype text: str + """ + + type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a type action, this property is + always set to ``type``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text to type. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.TYPE # type: ignore + + +class UrlCitationBody(Annotation, discriminator="url_citation"): + """URL citation. + + :ivar type: The type of the URL citation. Always ``url_citation``. Required. + :vartype type: str or ~azure.ai.projects.models.URL_CITATION + :ivar url: The URL of the web resource. Required. + :vartype url: str + :ivar start_index: The index of the first character of the URL citation in the message. + Required. + :vartype start_index: int + :ivar end_index: The index of the last character of the URL citation in the message. Required. + :vartype end_index: int + :ivar title: The title of the web resource. Required. + :vartype title: str + """ + + type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the URL citation. Always ``url_citation``. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the web resource. Required.""" + start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the first character of the URL citation in the message. Required.""" + end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the last character of the URL citation in the message. Required.""" + title: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The title of the web resource. Required.""" + + @overload + def __init__( + self, + *, + url: str, + start_index: int, + end_index: int, + title: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.URL_CITATION # type: ignore + + class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"): """A memory item specifically containing user profile information extracted from conversations, such as preferences, interests, and personal details. @@ -14272,25 +13005,21 @@ class VectorStoreFileAttributes(_Model): """ -class WebSearchAction(_Model): - """WebSearchAction. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - WebSearchActionFind, WebSearchActionOpenPage, WebSearchActionSearch +class Wait(ComputerAction, discriminator="wait"): + """Wait. - :ivar type: Required. Known values are: "search", "open_page", and "find". - :vartype type: str or ~azure.ai.projects.models.WebSearchActionType + :ivar type: Specifies the event type. For a wait action, this property is + always set to ``wait``. Required. + :vartype type: str or ~azure.ai.projects.models.WAIT """ - __mapping__: dict[str, _Model] = {} - type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) - """Required. Known values are: \"search\", \"open_page\", and \"find\".""" + type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a wait action, this property is + always set to ``wait``. Required.""" @overload def __init__( self, - *, - type: str, ) -> None: ... 
@overload @@ -14302,21 +13031,22 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) + self.type = ComputerActionType.WAIT # type: ignore -class WebSearchActionFind(WebSearchAction, discriminator="find"): - """Action type "find": Searches for a pattern within a loaded page. +class WebSearchActionFind(_Model): + """Find action. - :ivar type: The action type. Required. - :vartype type: str or ~azure.ai.projects.models.FIND + :ivar type: The action type. Required. Default value is "find_in_page". + :vartype type: str :ivar url: The URL of the page searched for the pattern. Required. :vartype url: str :ivar pattern: The pattern or text to search for within the page. Required. :vartype pattern: str """ - type: Literal[WebSearchActionType.FIND] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. Required.""" + type: Literal["find_in_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The action type. Required. Default value is \"find_in_page\".""" url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The URL of the page searched for the pattern. Required.""" pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -14339,20 +13069,20 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = WebSearchActionType.FIND # type: ignore + self.type: Literal["find_in_page"] = "find_in_page" -class WebSearchActionOpenPage(WebSearchAction, discriminator="open_page"): - """Action type "open_page" - Opens a specific URL from search results. +class WebSearchActionOpenPage(_Model): + """Open page action. - :ivar type: The action type. Required. 
- :vartype type: str or ~azure.ai.projects.models.OPEN_PAGE + :ivar type: The action type. Required. Default value is "open_page". + :vartype type: str :ivar url: The URL opened by the model. Required. :vartype url: str """ - type: Literal[WebSearchActionType.OPEN_PAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. Required.""" + type: Literal["open_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The action type. Required. Default value is \"open_page\".""" url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The URL opened by the model. Required.""" @@ -14372,24 +13102,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = WebSearchActionType.OPEN_PAGE # type: ignore + self.type: Literal["open_page"] = "open_page" -class WebSearchActionSearch(WebSearchAction, discriminator="search"): - """Action type "search" - Performs a web search query. +class WebSearchActionSearch(_Model): + """Search action. - :ivar type: The action type. Required. - :vartype type: str or ~azure.ai.projects.models.SEARCH - :ivar query: The search query. Required. + :ivar type: The action type. Required. Default value is "search". + :vartype type: str + :ivar query: [DEPRECATED] The search query. Required. :vartype query: str + :ivar queries: Search queries. + :vartype queries: list[str] :ivar sources: Web search sources. :vartype sources: list[~azure.ai.projects.models.WebSearchActionSearchSources] """ - type: Literal[WebSearchActionType.SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The action type. Required.""" + type: Literal["search"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The action type. Required. 
Default value is \"search\".""" query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The search query. Required.""" + """[DEPRECATED] The search query. Required.""" + queries: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Search queries.""" sources: Optional[list["_models.WebSearchActionSearchSources"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -14400,6 +13134,7 @@ def __init__( self, *, query: str, + queries: Optional[list[str]] = None, sources: Optional[list["_models.WebSearchActionSearchSources"]] = None, ) -> None: ... @@ -14412,7 +13147,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = WebSearchActionType.SEARCH # type: ignore + self.type: Literal["search"] = "search" class WebSearchActionSearchSources(_Model): @@ -14448,38 +13183,118 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type: Literal["url"] = "url" +class WebSearchApproximateLocation(_Model): + """Web search approximate location. + + :ivar type: The type of location approximation. Always ``approximate``. Default value is + "approximate". + :vartype type: str + :ivar country: + :vartype country: str + :ivar region: + :vartype region: str + :ivar city: + :vartype city: str + :ivar timezone: + :vartype timezone: str + """ + + type: Optional[Literal["approximate"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of location approximation. Always ``approximate``. 
Default value is \"approximate\".""" + country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + type: Optional[Literal["approximate"]] = None, + country: Optional[str] = None, + region: Optional[str] = None, + city: Optional[str] = None, + timezone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WebSearchConfiguration(_Model): + """A web search configuration for bing custom search. + + :ivar project_connection_id: Project connection id for grounding with bing custom search. + Required. + :vartype project_connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for grounding with bing custom search. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. Required.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + instance_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): - """Note: web_search is not yet available via Azure OpenAI. + """Web search preview. :ivar type: The type of the web search tool. One of ``web_search_preview`` or ``web_search_preview_2025_03_11``. Required. :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW - :ivar user_location: The user's location. - :vartype user_location: ~azure.ai.projects.models.Location + :ivar user_location: + :vartype user_location: ~azure.ai.projects.models.ApproximateLocation :ivar search_context_size: High level guidance for the amount of context window space to use - for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of - the following types: Literal["low"], Literal["medium"], Literal["high"] - :vartype search_context_size: str or str or str + for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Known + values are: "low", "medium", and "high". + :vartype search_context_size: str or ~azure.ai.projects.models.SearchContextSize """ type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """The type of the web search tool. One of ``web_search_preview`` or ``web_search_preview_2025_03_11``. 
Required.""" - user_location: Optional["_models.Location"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The user's location.""" - search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field( + user_location: Optional["_models.ApproximateLocation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) """High level guidance for the amount of context window space to use for the search. One of - ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types: - Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]""" + ``low``, ``medium``, or ``high``. ``medium`` is the default. Known values are: \"low\", + \"medium\", and \"high\".""" @overload def __init__( self, *, - user_location: Optional["_models.Location"] = None, - search_context_size: Optional[Literal["low", "medium", "high"]] = None, + user_location: Optional["_models.ApproximateLocation"] = None, + search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = None, ) -> None: ... @overload @@ -14494,29 +13309,54 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.type = ToolType.WEB_SEARCH_PREVIEW # type: ignore -class WebSearchToolCallItemParam(ItemParam, discriminator="web_search_call"): - """The results of a web search tool call. See the - `web search guide `_ for more - information. +class WebSearchTool(Tool, discriminator="web_search"): + """Web search. - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL - :ivar action: An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. - :vartype action: ~azure.ai.projects.models.WebSearchAction + :ivar type: The type of the web search tool. 
One of ``web_search`` or + ``web_search_2025_08_26``. Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH + :ivar filters: + :vartype filters: ~azure.ai.projects.models.WebSearchToolFilters + :ivar user_location: + :vartype user_location: ~azure.ai.projects.models.WebSearchApproximateLocation + :ivar search_context_size: High level guidance for the amount of context window space to use + for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of + the following types: Literal["low"], Literal["medium"], Literal["high"] + :vartype search_context_size: str or str or str + :ivar custom_search_configuration: The project connections attached to this tool. There can be + a maximum of 1 connection + resource attached to the tool. + :vartype custom_search_configuration: ~azure.ai.projects.models.WebSearchConfiguration """ - type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required.""" + type: Literal[ToolType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the web search tool. One of ``web_search`` or ``web_search_2025_08_26``. 
Required.""" + filters: Optional["_models.WebSearchToolFilters"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + user_location: Optional["_models.WebSearchApproximateLocation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """High level guidance for the amount of context window space to use for the search. One of + ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types: + Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]""" + custom_search_configuration: Optional["_models.WebSearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" @overload def __init__( self, *, - action: "_models.WebSearchAction", + filters: Optional["_models.WebSearchToolFilters"] = None, + user_location: Optional["_models.WebSearchApproximateLocation"] = None, + search_context_size: Optional[Literal["low", "medium", "high"]] = None, + custom_search_configuration: Optional["_models.WebSearchConfiguration"] = None, ) -> None: ... @overload @@ -14528,47 +13368,23 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.WEB_SEARCH_CALL # type: ignore + self.type = ToolType.WEB_SEARCH # type: ignore -class WebSearchToolCallItemResource(ItemResource, discriminator="web_search_call"): - """The results of a web search tool call. See the - `web search guide `_ for more - information. +class WebSearchToolFilters(_Model): + """WebSearchToolFilters. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. 
- :vartype created_by: ~azure.ai.projects.models.CreatedBy - :ivar type: Required. - :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL - :ivar status: The status of the web search tool call. Required. Is one of the following types: - Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] - :vartype status: str or str or str or str - :ivar action: An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required. - :vartype action: ~azure.ai.projects.models.WebSearchAction + :ivar allowed_domains: + :vartype allowed_domains: list[str] """ - type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """Required.""" - status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The status of the web search tool call. Required. Is one of the following types: - Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" - action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """An object describing the specific action taken in this web search call. - Includes details on how the model used the web (search, open_page, find). Required.""" + allowed_domains: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin - status: Literal["in_progress", "searching", "completed", "failed"], - action: "_models.WebSearchAction", - created_by: Optional["_models.CreatedBy"] = None, + allowed_domains: Optional[list[str]] = None, ) -> None: ... 
@overload @@ -14580,7 +13396,6 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.WEB_SEARCH_CALL # type: ignore class WeeklyRecurrenceSchedule(RecurrenceSchedule, discriminator="Weekly"): @@ -14621,10 +13436,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_action"): """WorkflowActionOutputItemResource. - :ivar id: Required. - :vartype id: str - :ivar created_by: The information about the creator of the item. - :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or + a str type. + :vartype created_by: ~azure.ai.projects.models.CreatedBy or str :ivar type: Required. :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. @@ -14641,7 +13455,7 @@ class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_act :vartype status: str or str or str or str """ - type: Literal[ItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + type: Literal[ItemResourceType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore """Required.""" kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). 
Required.""" @@ -14662,11 +13476,10 @@ class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_act def __init__( self, *, - id: str, # pylint: disable=redefined-builtin kind: str, action_id: str, status: Literal["completed", "failed", "in_progress", "cancelled"], - created_by: Optional["_models.CreatedBy"] = None, + created_by: Optional[Union["_models.CreatedBy", str]] = None, parent_action_id: Optional[str] = None, previous_action_id: Optional[str] = None, ) -> None: ... @@ -14680,7 +13493,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.type = ItemType.WORKFLOW_ACTION # type: ignore + self.type = ItemResourceType.WORKFLOW_ACTION # type: ignore class WorkflowAgentDefinition(AgentDefinition, discriminator="workflow"): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index fc88ae38be4a..e3b7029fdbee 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -259,7 +260,7 @@ def from_continuation_token( cls, polling_method: AsyncPollingMethod[MemoryStoreUpdateCompletedResult], continuation_token: str, - **kwargs: Any + **kwargs: Any, ) -> "AsyncUpdateMemoriesLROPoller": """Create a poller from a continuation token. 
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 71813b37c2f7..3def1c5f7d71 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -37,7 +37,6 @@ from .._configuration import AIProjectClientConfiguration from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from .._utils.serialization import Deserializer, Serializer -from .._validation import api_version_validation JSON = MutableMapping[str, Any] _Unset: Any = object() @@ -201,7 +200,7 @@ def build_agents_list_request( *, kind: Optional[Union[str, _models.AgentKind]] = None, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, after: Optional[str] = None, before: Optional[str] = None, **kwargs: Any @@ -344,7 +343,7 @@ def build_agents_list_versions_request( agent_name: str, *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, after: Optional[str] = None, before: Optional[str] = None, **kwargs: Any @@ -380,6 +379,39 @@ def build_agents_list_versions_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) +def build_agents_stream_agent_container_logs_request( # pylint: disable=name-too-long + agent_name: str, + agent_version: str, + *, + kind: Optional[Union[str, _models.ContainerLogKind]] = None, + replica_name: Optional[str] = None, + tail: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-11-15-preview")) + # Construct URL + _url = "/agents/{agent_name}/versions/{agent_version}/containers/default:logstream" + path_format_arguments = { + "agent_name": _SERIALIZER.url("agent_name", agent_name, "str"), + "agent_version": _SERIALIZER.url("agent_version", agent_version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if kind is not None: + _params["kind"] = _SERIALIZER.query("kind", kind, "str") + if replica_name is not None: + _params["replica_name"] = _SERIALIZER.query("replica_name", replica_name, "str") + if tail is not None: + _params["tail"] = _SERIALIZER.query("tail", tail, "int") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + def build_memory_stores_create_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -456,7 +488,7 @@ def build_memory_stores_get_request(name: str, **kwargs: Any) -> HttpRequest: def build_memory_stores_list_request( *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, after: Optional[str] = None, before: Optional[str] = None, **kwargs: Any @@ -1722,11 +1754,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get(self, agent_name: str, **kwargs: Any) -> _models.AgentDetails: """Retrieves the agent. 
@@ -1859,11 +1886,6 @@ def create(self, body: IO[bytes], *, content_type: str = "application/json", **k """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -2046,11 +2068,6 @@ def update( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def update( self, agent_name: str, @@ -2229,11 +2246,6 @@ def create_from_manifest( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_from_manifest( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -2433,11 +2445,6 @@ def update_from_manifest( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def update_from_manifest( self, agent_name: str, @@ -2552,11 +2559,6 @@ def update_from_manifest( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete(self, agent_name: str, **kwargs: Any) -> _models.DeleteAgentResponse: """Deletes an agent. 
@@ -2621,17 +2623,12 @@ def delete(self, agent_name: str, **kwargs: Any) -> _models.DeleteAgentResponse: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "kind", "limit", "order", "after", "before", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, kind: Optional[Union[str, _models.AgentKind]] = None, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> ItemPaged["_models.AgentDetails"]: @@ -2646,9 +2643,8 @@ def list( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. 
For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -2805,11 +2801,6 @@ def create_version( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_version( self, agent_name: str, @@ -3006,11 +2997,6 @@ def create_version_from_manifest( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_version_from_manifest( self, agent_name: str, @@ -3128,11 +3114,6 @@ def create_version_from_manifest( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "agent_version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get_version(self, agent_name: str, agent_version: str, **kwargs: Any) -> _models.AgentVersionDetails: """Retrieves a specific version of an agent. @@ -3200,11 +3181,6 @@ def get_version(self, agent_name: str, agent_version: str, **kwargs: Any) -> _mo return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "agent_name", "agent_version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete_version(self, agent_name: str, agent_version: str, **kwargs: Any) -> _models.DeleteAgentVersionResponse: """Deletes a specific version of an agent. 
@@ -3273,19 +3249,12 @@ def delete_version(self, agent_name: str, agent_version: str, **kwargs: Any) -> return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "agent_name", "limit", "order", "after", "before", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list_versions( self, agent_name: str, *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> ItemPaged["_models.AgentVersionDetails"]: @@ -3299,9 +3268,8 @@ def list_versions( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -3371,6 +3339,106 @@ def get_next(_continuation_token=None): return ItemPaged(get_next, extract_data) + @distributed_trace + def stream_agent_container_logs( # pylint: disable=inconsistent-return-statements + self, + agent_name: str, + agent_version: str, + *, + kind: Optional[Union[str, _models.ContainerLogKind]] = None, + replica_name: Optional[str] = None, + tail: Optional[int] = None, + **kwargs: Any + ) -> None: + """Container log entry streamed from the container as text chunks. + Each chunk is a UTF-8 string that may be either a plain text log line + or a JSON-formatted log entry, depending on the type of container log being streamed. 
+ Clients should treat each chunk as opaque text and, if needed, attempt + to parse it as JSON based on their logging requirements. + + For system logs, the format is JSON with the following structure: + {"TimeStamp":"2025-12-15T16:51:33Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Connecting + to the events + collector...","Reason":"StartingGettingEvents","EventSource":"ContainerAppController","Count":1} + {"TimeStamp":"2025-12-15T16:51:34Z","Type":"Normal","ContainerAppName":null,"RevisionName":null,"ReplicaName":null,"Msg":"Successfully + connected to events + server","Reason":"ConnectedToEventsServer","EventSource":"ContainerAppController","Count":1} + + For console logs, the format is plain text as emitted by the container's stdout/stderr. + 2025-12-15T08:43:48.72656 Connecting to the container 'agent-container'... + 2025-12-15T08:43:48.75451 Successfully Connected to container: 'agent-container' [Revision: + 'je90fe655aa742ef9a188b9fd14d6764--7tca06b', Replica: + 'je90fe655aa742ef9a188b9fd14d6764--7tca06b-6898b9c89f-mpkjc'] + 2025-12-15T08:33:59.0671054Z stdout F INFO: 127.0.0.1:42588 - "GET /readiness HTTP/1.1" 200 + OK + 2025-12-15T08:34:29.0649033Z stdout F INFO: 127.0.0.1:60246 - "GET /readiness HTTP/1.1" 200 + OK + 2025-12-15T08:34:59.0644467Z stdout F INFO: 127.0.0.1:43994 - "GET /readiness HTTP/1.1" 200 + OK. + + :param agent_name: The name of the agent. Required. + :type agent_name: str + :param agent_version: The version of the agent. Required. + :type agent_version: str + :keyword kind: console returns container stdout/stderr, system returns container app event + stream. defaults to console. Known values are: "console" and "system". Default value is None. + :paramtype kind: str or ~azure.ai.projects.models.ContainerLogKind + :keyword replica_name: When omitted, the server chooses the first replica for console logs. + Required to target a specific replica. Default value is None. 
+ :paramtype replica_name: str + :keyword tail: Number of trailing lines returned. Enforced to 1-300. Defaults to 20. Default + value is None. + :paramtype tail: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_agents_stream_agent_container_logs_request( + agent_name=agent_name, + agent_version=agent_version, + kind=kind, + replica_name=replica_name, + tail=tail, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ApiErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + class MemoryStoresOperations: """ @@ -3452,11 +3520,6 @@ def create( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create( self, body: Union[JSON, IO[bytes]] = _Unset, @@ -3618,11 +3681,6 
@@ def update( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def update( self, name: str, @@ -3715,11 +3773,6 @@ def update( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: """Retrieve a memory store. @@ -3784,16 +3837,11 @@ def get(self, name: str, **kwargs: Any) -> _models.MemoryStoreDetails: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "limit", "order", "after", "before", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, limit: Optional[int] = None, - order: Optional[Literal["asc", "desc"]] = None, + order: Optional[Union[str, _models.PageOrder]] = None, before: Optional[str] = None, **kwargs: Any ) -> ItemPaged["_models.MemoryStoreDetails"]: @@ -3805,9 +3853,8 @@ def list( :paramtype limit: int :keyword order: Sort order by the ``created_at`` timestamp of the objects. ``asc`` for ascending order and``desc`` - for descending order. Is either a Literal["asc"] type or a Literal["desc"] type. Default value - is None. - :paramtype order: str or str + for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.PageOrder :keyword before: A cursor for use in pagination. ``before`` is an object ID that defines your place in the list. 
For instance, if you make a list request and receive 100 objects, ending with obj_foo, your @@ -3877,11 +3924,6 @@ def get_next(_continuation_token=None): return ItemPaged(get_next, extract_data) @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete(self, name: str, **kwargs: Any) -> _models.DeleteMemoryStoreResult: """Delete a memory store. @@ -3952,7 +3994,7 @@ def search_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_search_id: Optional[str] = None, options: Optional[_models.MemorySearchOptions] = None, **kwargs: Any @@ -3968,7 +4010,7 @@ def search_memories( Default value is "application/json". :paramtype content_type: str :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_search_id: The unique ID of the previous search request, enabling incremental memory search from where the last operation left off. Default value is None. :paramtype previous_search_id: str @@ -4016,18 +4058,13 @@ def search_memories( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def search_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_search_id: Optional[str] = None, options: Optional[_models.MemorySearchOptions] = None, **kwargs: Any @@ -4042,7 +4079,7 @@ def search_memories( Required. 
:paramtype scope: str :keyword items: Items for which to search for relevant memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_search_id: The unique ID of the previous search request, enabling incremental memory search from where the last operation left off. Default value is None. :paramtype previous_search_id: str @@ -4126,18 +4163,13 @@ def search_memories( return deserialized # type: ignore - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def _update_memories_initial( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -4222,7 +4254,7 @@ def _begin_update_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -4237,18 +4269,13 @@ def _begin_update_memories( ) -> LROPoller[_models.MemoryStoreUpdateCompletedResult]: ... 
@distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def _begin_update_memories( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any @@ -4263,7 +4290,7 @@ def _begin_update_memories( Required. :paramtype scope: str :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. :paramtype previous_update_id: str @@ -4398,11 +4425,6 @@ def delete_scope( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete_scope( self, name: str, body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, **kwargs: Any ) -> _models.MemoryStoreDeleteScopeResult: @@ -6068,11 +6090,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) def get(self, name: str, **kwargs: Any) -> _models.RedTeam: """Get a redteam by name. 
@@ -6138,11 +6155,6 @@ def get(self, name: str, **kwargs: Any) -> _models.RedTeam: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "client_request_id", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) def list(self, **kwargs: Any) -> ItemPaged["_models.RedTeam"]: """List a redteam by name. @@ -6269,11 +6281,6 @@ def create(self, red_team: IO[bytes], *, content_type: str = "application/json", """ @distributed_trace - @api_version_validation( - method_added_on="2025-05-15-preview", - params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, - api_versions_list=["2025-05-15-preview", "2025-11-15-preview"], - ) def create(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: """Creates a redteam run. @@ -6362,11 +6369,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: """Get an evaluation rule. @@ -6432,11 +6434,6 @@ def get(self, id: str, **kwargs: Any) -> _models.EvaluationRule: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements """Delete an evaluation rule. 
@@ -6544,11 +6541,6 @@ def create_or_update( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_or_update( self, id: str, evaluation_rule: Union[_models.EvaluationRule, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationRule: @@ -6624,13 +6616,6 @@ def create_or_update( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "action_type", "agent_name", "enabled", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, @@ -6748,11 +6733,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: """Get an evaluation run by name. 
@@ -6818,13 +6798,6 @@ def get(self, name: str, **kwargs: Any) -> _models.EvaluationTaxonomy: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "input_name", "input_type", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, input_name: Optional[str] = None, input_type: Optional[str] = None, **kwargs: Any ) -> ItemPaged["_models.EvaluationTaxonomy"]: @@ -6915,11 +6888,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete(self, name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements """Delete an evaluation taxonomy by name. @@ -7027,11 +6995,6 @@ def create( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create( self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationTaxonomy: @@ -7161,11 +7124,6 @@ def update( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def update( self, name: str, body: Union[_models.EvaluationTaxonomy, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluationTaxonomy: @@ -7259,11 +7217,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - 
@api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "type", "limit", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_versions( self, name: str, @@ -7365,11 +7318,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "type", "limit", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_latest_versions( self, *, @@ -7467,11 +7415,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get_version(self, name: str, version: str, **kwargs: Any) -> _models.EvaluatorVersion: """Get the specific version of the EvaluatorVersion. The service returns 404 Not Found error if the EvaluatorVersion does not exist. 
@@ -7536,11 +7479,6 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Evaluat return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete_version( # pylint: disable=inconsistent-return-statements self, name: str, version: str, **kwargs: Any ) -> None: @@ -7654,11 +7592,6 @@ def create_version( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_version( self, name: str, evaluator_version: Union[_models.EvaluatorVersion, JSON, IO[bytes]], **kwargs: Any ) -> _models.EvaluatorVersion: @@ -7806,11 +7739,6 @@ def update_version( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "name", "version", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def update_version( self, name: str, @@ -7958,19 +7886,6 @@ def generate(self, insight: IO[bytes], *, content_type: str = "application/json" """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": [ - "api_version", - "repeatability_request_id", - "repeatability_first_sent", - "content_type", - "accept", - ] - }, - api_versions_list=["2025-11-15-preview"], - ) def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: Any) -> _models.Insight: """Generate Insights. 
@@ -8041,13 +7956,6 @@ def generate(self, insight: Union[_models.Insight, JSON, IO[bytes]], **kwargs: A return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": ["api_version", "id", "include_coordinates", "client_request_id", "accept"] - }, - api_versions_list=["2025-11-15-preview"], - ) def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: Any) -> _models.Insight: """Get a specific insight by Id. @@ -8117,22 +8025,6 @@ def get(self, id: str, *, include_coordinates: Optional[bool] = None, **kwargs: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={ - "2025-11-15-preview": [ - "api_version", - "type", - "eval_id", - "run_id", - "agent_name", - "include_coordinates", - "client_request_id", - "accept", - ] - }, - api_versions_list=["2025-11-15-preview"], - ) def list( self, *, @@ -8259,11 +8151,6 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id"]}, - api_versions_list=["2025-11-15-preview"], - ) def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements """Delete a schedule. 
@@ -8317,11 +8204,6 @@ def delete(self, id: str, **kwargs: Any) -> None: # pylint: disable=inconsisten return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get(self, id: str, **kwargs: Any) -> _models.Schedule: """Get a schedule by id. @@ -8387,11 +8269,6 @@ def get(self, id: str, **kwargs: Any) -> _models.Schedule: return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list(self, **kwargs: Any) -> ItemPaged["_models.Schedule"]: """List all schedules. @@ -8528,11 +8405,6 @@ def create_or_update( """ @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "content_type", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def create_or_update( self, id: str, schedule: Union[_models.Schedule, JSON, IO[bytes]], **kwargs: Any ) -> _models.Schedule: @@ -8608,11 +8480,6 @@ def create_or_update( return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "schedule_id", "run_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.ScheduleRun: """Get a schedule run by id. 
@@ -8676,11 +8543,6 @@ def get_run(self, schedule_id: str, run_id: str, **kwargs: Any) -> _models.Sched return deserialized # type: ignore @distributed_trace - @api_version_validation( - method_added_on="2025-11-15-preview", - params_added_on={"2025-11-15-preview": ["api_version", "id", "client_request_id", "accept"]}, - api_versions_list=["2025-11-15-preview"], - ) def list_runs(self, id: str, **kwargs: Any) -> ItemPaged["_models.ScheduleRun"]: """List all schedule runs. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py index e69e8d2988ec..ca25e90aa008 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_memories.py @@ -14,8 +14,8 @@ from .. import models as _models from ..models import ( MemoryStoreOperationUsage, - MemoryStoreOperationUsageInputTokensDetails, - MemoryStoreOperationUsageOutputTokensDetails, + ResponseUsageInputTokensDetails, + ResponseUsageOutputTokensDetails, MemoryStoreUpdateCompletedResult, UpdateMemoriesLROPoller, UpdateMemoriesLROPollingMethod, @@ -34,7 +34,7 @@ def begin_update_memories( *, scope: str, content_type: str = "application/json", - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any, @@ -50,7 +50,7 @@ def begin_update_memories( Default value is "application/json". :paramtype content_type: str :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. 
:paramtype previous_update_id: str @@ -119,7 +119,7 @@ def begin_update_memories( body: Union[JSON, IO[bytes]] = _Unset, *, scope: str = _Unset, - items: Optional[List[_models.ItemParam]] = None, + items: Optional[List[_models.InputItem]] = None, previous_update_id: Optional[str] = None, update_delay: Optional[int] = None, **kwargs: Any, @@ -134,7 +134,7 @@ def begin_update_memories( Required. :paramtype scope: str :keyword items: Conversation items from which to extract memories. Default value is None. - :paramtype items: list[~azure.ai.projects.models.ItemParam] + :paramtype items: list[~azure.ai.projects.models.InputItem] :keyword previous_update_id: The unique ID of the previous update request, enabling incremental memory updates from where the last operation left off. Default value is None. :paramtype previous_update_id: str @@ -193,9 +193,9 @@ def get_long_running_output(pipeline_response): usage = MemoryStoreOperationUsage( embedding_tokens=0, input_tokens=0, - input_tokens_details=MemoryStoreOperationUsageInputTokensDetails(cached_tokens=0), + input_tokens_details=ResponseUsageInputTokensDetails(cached_tokens=0), output_tokens=0, - output_tokens_details=MemoryStoreOperationUsageOutputTokensDetails(reasoning_tokens=0), + output_tokens_details=ResponseUsageOutputTokensDetails(reasoning_tokens=0), total_tokens=0, ) deserialized = MemoryStoreUpdateCompletedResult(memory_operations=[], usage=usage) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py index 4297fcee6892..ba9a5da65c3a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_ai_project_instrumentor.py @@ -488,6 +488,11 @@ def _add_instructions_event( # Convert schema to JSON string if it's a dict/object if isinstance(response_schema, dict): schema_str = json.dumps(response_schema, 
ensure_ascii=False) + elif hasattr(response_schema, "as_dict"): + # Handle model objects that have as_dict() method (e.g., ResponseFormatJsonSchemaSchema) + schema_dict = response_schema.as_dict() + schema_str = json.dumps(schema_dict, ensure_ascii=False) + # TODO: is this 'elif' still needed, now that we added the above? elif hasattr(response_schema, "__dict__"): + # Handle model objects by converting to dict first + schema_dict = {k: v for k, v in response_schema.__dict__.items() if not k.startswith("_")} + schema_str = json.dumps(schema_dict, ensure_ascii=False) diff --git a/sdk/ai/azure-ai-projects/cspell.json b/sdk/ai/azure-ai-projects/cspell.json index 12f5733ae5c7..dbf2fceabc01 100644 --- a/sdk/ai/azure-ai-projects/cspell.json +++ b/sdk/ai/azure-ai-projects/cspell.json @@ -2,38 +2,40 @@ "ignoreWords": [ "agentic", "aiproject", - "azureopenai", - "GLEU", + "aiprojectclient", "aiservices", - "azureai", - "GENAI", - "UPIA", "ansii", - "getconnectionwithcredentials", - "quantitive", - "balapvbyostoragecanary", - "fspath", "aread", - "inpainting", - "CSDL", - "fstring", - "aiprojectclient", - "Tadmaq", - "Udbk", - "Ministral", + "azureai", + "azureopenai", + "balapvbyostoragecanary", + "closefd", "cogsvc", + "CSDL", "evals", "FineTuning", - "ftjob", + "fspath", + "fstring", "ftchkpt", - "closefd" + "ftjob", + "GENAI", + "getconnectionwithcredentials", + "GLEU", + "inpainting", + "Ministral", + "mpkjc", + "quantitive", + "Tadmaq", + "Udbk", + "UPIA", + "xhigh" + ], + "ignorePaths": [ + "*.csv", + "*.json", + "*.jsonl" ], - "ignorePaths": [ - "*.csv", - "*.json", - "*.jsonl" - ], - "words": [ - "Pxqzykebv" - ] + "words": [ + "Pxqzykebv" + ] } diff --git a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd index 5baa4bc23f33..6826a3a15591 100644 --- a/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd +++ b/sdk/ai/azure-ai-projects/post-emitter-fixes.cmd @@ -20,15 +20,14 @@ REM Rename `"items_property": items`, to `"items": items` in search_memories and powershell -Command
"(Get-Content azure\ai\projects\aio\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\aio\operations\_operations.py" powershell -Command "(Get-Content azure\ai\projects\operations\_operations.py) -replace '\"items_property\": items', '\"items\": items' | Set-Content azure\ai\projects\operations\_operations.py" -REM Add quotation marks around "str" in the expression: content: Union[str, list["_models.ItemContent"]] = rest_field( -REM This fixes the serialization of this expression: item_param: ItemParam = ResponsesUserMessageItemParam(content="my text") -powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'Union\[str, list\[\"_models\.ItemContent\"\]\] = rest_field\(', 'Union[\"str\", list[\"_models.ItemContent\"]] = rest_field(' | Set-Content azure\ai\projects\models\_models.py" - REM Fix type annotations by replacing "_types.Filters" with proper union type to fix Pyright errors powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace '\"_types\.Filters\"', 'Union[\"_models.ComparisonFilter\", \"_models.CompoundFilter\"]' | Set-Content azure\ai\projects\models\_models.py" REM Add additional pylint disables to the model_base.py file powershell -Command "(Get-Content azure\ai\projects\_utils\model_base.py) -replace '# pylint: disable=protected-access, broad-except', '# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter' | Set-Content azure\ai\projects\_utils\model_base.py" +REM Add pyright ignore comment to created_by fields to suppress reportIncompatibleVariableOverride errors +powershell -Command "(Get-Content azure\ai\projects\models\_models.py) -replace 'created_by: Optional\[str\] = rest_field\(visibility=\[\"read\", \"create\", \"update\", \"delete\", \"query\"\]\)', 'created_by: Optional[str] = rest_field(visibility=[\"read\", \"create\", \"update\", \"delete\", \"query\"]) # pyright: 
ignore[reportIncompatibleVariableOverride]' | Set-Content azure\ai\projects\models\_models.py" + echo Now do these additional changes manually, if you want the "Generate docs" job to succeed in PR pipeline -REM 1. Remove `generate_summary` from class `Reasoning`. It's deprecated but causes two types of errors. Consider removing it from TypeSpec. +REM Remove `generate_summary` from class `Reasoning`. It's deprecated but causes two types of errors. Consider removing it from TypeSpec. diff --git a/sdk/ai/azure-ai-projects/pyproject.toml b/sdk/ai/azure-ai-projects/pyproject.toml index 410bc99ea30e..7f55115918ef 100644 --- a/sdk/ai/azure-ai-projects/pyproject.toml +++ b/sdk/ai/azure-ai-projects/pyproject.toml @@ -33,7 +33,8 @@ keywords = ["azure", "azure sdk"] dependencies = [ "isodate>=0.6.1", - "azure-core>=1.35.0", + "azure-core>=1.36.0", + "typing-extensions>=4.11", "azure-identity>=1.15.0", "openai>=2.8.0", "azure-storage-blob>=12.15.0", diff --git a/sdk/ai/azure-ai-projects/pyrightconfig.json b/sdk/ai/azure-ai-projects/pyrightconfig.json index b5a3fafb4354..b8b684eb3af1 100644 --- a/sdk/ai/azure-ai-projects/pyrightconfig.json +++ b/sdk/ai/azure-ai-projects/pyrightconfig.json @@ -2,8 +2,6 @@ "reportTypeCommentUsage": true, "reportMissingImports": false, "pythonVersion": "3.11", - "exclude": [ - ], "extraPaths": [ "./../../core/azure-core", "./../../evaluation/azure-ai-evaluation", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py index e2fd112e2301..31d84821591e 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output.py @@ -38,7 +38,7 @@ from azure.ai.projects.models import ( PromptAgentDefinition, PromptAgentDefinitionText, - ResponseTextFormatConfigurationJsonSchema, + TextResponseFormatJsonSchema, ) from pydantic import BaseModel, Field @@ -65,9 
+65,7 @@ class CalendarEvent(BaseModel): definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( - name="CalendarEvent", schema=CalendarEvent.model_json_schema() - ) + format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" You are a helpful assistant that extracts calendar event information from the input user messages, diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py index 0f6ca012249d..beb96d3800a9 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agent_structured_output_async.py @@ -39,7 +39,7 @@ from azure.ai.projects.models import ( PromptAgentDefinition, PromptAgentDefinitionText, - ResponseTextFormatConfigurationJsonSchema, + TextResponseFormatJsonSchema, ) from pydantic import BaseModel, Field @@ -66,9 +66,7 @@ async def main() -> None: definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( - name="CalendarEvent", schema=CalendarEvent.model_json_schema() - ) + format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" You are a helpful assistant that extracts calendar event information from the input user messages, diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py index 4da066eed149..65e51a97aa6c 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent.py @@ -30,9 +30,8 @@ from 
azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - ResponseStreamEventType, WorkflowAgentDefinition, - ItemType, + ItemResourceType, ) load_dotenv() @@ -157,9 +156,8 @@ for event in stream: print(f"Event {event.sequence_number} type '{event.type}'", end="") if ( - event.type == ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED - or event.type == ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE - ) and event.item.type == ItemType.WORKFLOW_ACTION: + event.type == "response.output_item.added" or event.type == "response.output_item.done" + ) and event.item.type == ItemResourceType.WORKFLOW_ACTION: print( f": item action ID '{event.item.action_id}' is '{event.item.status}' (previous action ID: '{event.item.previous_action_id}')", end="", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py index 4e07d00610c6..293fbcf5fa09 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_workflow_multi_agent_async.py @@ -31,9 +31,8 @@ from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - ResponseStreamEventType, WorkflowAgentDefinition, - ItemType, + ItemResourceType, ) load_dotenv() @@ -159,9 +158,8 @@ async def main(): async for event in stream: print(f"Event {event.sequence_number} type '{event.type}'", end="") if ( - event.type == ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED - or event.type == ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE - ) and event.item.type == ItemType.WORKFLOW_ACTION: + event.type == "response.output_item.added" or event.type == "response.output_item.done" + ) and event.item.type == ItemResourceType.WORKFLOW_ACTION: print( f": item action ID '{event.item.action_id}' is '{event.item.status}' (previous action ID: 
'{event.item.previous_action_id}')", end="", diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py index 263aee360715..876ed221a2fe 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_ai_search.py @@ -6,7 +6,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Azure AI Search capabilities - using the AzureAISearchAgentTool and synchronous Azure AI Projects client. The agent can search + using the AzureAISearchTool and synchronous Azure AI Projects client. The agent can search indexed content and provide responses with citations from search results. USAGE: @@ -32,7 +32,7 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( - AzureAISearchAgentTool, + AzureAISearchTool, PromptAgentDefinition, AzureAISearchToolResource, AISearchIndexResource, @@ -50,7 +50,7 @@ ): # [START tool_declaration] - tool = AzureAISearchAgentTool( + tool = AzureAISearchTool( azure_ai_search=AzureAISearchToolResource( indexes=[ AISearchIndexResource( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py index f24b96604038..ea15b1c331a5 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_custom_search.py @@ -7,7 +7,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Bing Custom Search capabilities - using the BingCustomSearchAgentTool and synchronous Azure AI Projects client. The agent can search + using the BingCustomSearchPreviewTool and synchronous Azure AI Projects client. 
The agent can search custom search instances and provide responses with relevant results. USAGE: @@ -34,7 +34,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - BingCustomSearchAgentTool, + BingCustomSearchPreviewTool, BingCustomSearchToolParameters, BingCustomSearchConfiguration, ) @@ -50,7 +50,7 @@ ): # [START tool_declaration] - tool = BingCustomSearchAgentTool( + tool = BingCustomSearchPreviewTool( bing_custom_search_preview=BingCustomSearchToolParameters( search_configurations=[ BingCustomSearchConfiguration( diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py index c6110d4d3c79..b09aa64347ee 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_bing_grounding.py @@ -7,11 +7,11 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Bing grounding capabilities - using the BingGroundingAgentTool and synchronous Azure AI Projects client. The agent can search + using the BingGroundingTool and synchronous Azure AI Projects client. The agent can search the web for current information and provide grounded responses with URL citations. 
The sample shows: - - Creating an agent with BingGroundingAgentTool configured for web search + - Creating an agent with BingGroundingTool configured for web search - Making requests that require current information from the web - Extracting URL citations from the response annotations - Processing grounded responses with source citations @@ -38,7 +38,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - BingGroundingAgentTool, + BingGroundingTool, BingGroundingSearchToolParameters, BingGroundingSearchConfiguration, ) @@ -54,7 +54,7 @@ ): # [START tool_declaration] - tool = BingGroundingAgentTool( + tool = BingGroundingTool( bing_grounding=BingGroundingSearchToolParameters( search_configurations=[ BingGroundingSearchConfiguration(project_connection_id=os.environ["BING_PROJECT_CONNECTION_ID"]) diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py index 0a54a5646f1f..5156baa55fd3 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_browser_automation.py @@ -6,7 +6,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Browser Automation capabilities - using the BrowserAutomationAgentTool and synchronous Azure AI Projects client. The agent can + using the BrowserAutomationPreviewTool and synchronous Azure AI Projects client. The agent can perform automated web browsing tasks and provide responses based on web interactions. 
USAGE: @@ -32,7 +32,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - BrowserAutomationAgentTool, + BrowserAutomationPreviewTool, BrowserAutomationToolParameters, BrowserAutomationToolConnectionParameters, ) @@ -48,7 +48,7 @@ ): # [START tool_declaration] - tool = BrowserAutomationAgentTool( + tool = BrowserAutomationPreviewTool( browser_automation_preview=BrowserAutomationToolParameters( connection=BrowserAutomationToolConnectionParameters( project_connection_id=os.environ["BROWSER_AUTOMATION_PROJECT_CONNECTION_ID"], diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py index db84258efae8..443f736d5cff 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter.py @@ -28,7 +28,7 @@ from dotenv import load_dotenv from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient -from azure.ai.projects.models import PromptAgentDefinition, CodeInterpreterTool, CodeInterpreterToolAuto +from azure.ai.projects.models import PromptAgentDefinition, CodeInterpreterTool, CodeInterpreterContainerAuto load_dotenv() @@ -48,7 +48,7 @@ # Upload the CSV file for the code interpreter file = openai_client.files.create(purpose="assistants", file=open(asset_file_path, "rb")) - tool = CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[file.id])) + tool = CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[file.id])) # [END tool_declaration] print(f"File uploaded (id: {file.id})") diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py index b794599b70af..0f8f8eacf6ee 100644 --- 
a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_code_interpreter_async.py @@ -29,7 +29,7 @@ from dotenv import load_dotenv from azure.identity.aio import DefaultAzureCredential from azure.ai.projects.aio import AIProjectClient -from azure.ai.projects.models import PromptAgentDefinition, CodeInterpreterTool, CodeInterpreterToolAuto +from azure.ai.projects.models import PromptAgentDefinition, CodeInterpreterTool, CodeInterpreterContainerAuto load_dotenv() @@ -61,7 +61,7 @@ async def main() -> None: definition=PromptAgentDefinition( model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], instructions="You are a helpful assistant.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[file.id]))], + tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[file.id]))], ), description="Code interpreter agent for data analysis and visualization.", ) diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py index 20c9a062905c..3107cfe87e23 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_fabric.py @@ -6,7 +6,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Microsoft Fabric capabilities - using the MicrosoftFabricAgentTool and synchronous Azure AI Projects client. The agent can query + using the MicrosoftFabricPreviewTool and synchronous Azure AI Projects client. The agent can query Fabric data sources and provide responses based on data analysis. 
USAGE: @@ -32,7 +32,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - MicrosoftFabricAgentTool, + MicrosoftFabricPreviewTool, FabricDataAgentToolParameters, ToolProjectConnection, ) @@ -47,7 +47,7 @@ project_client.get_openai_client() as openai_client, ): # [START tool_declaration] - tool = MicrosoftFabricAgentTool( + tool = MicrosoftFabricPreviewTool( fabric_dataagent_preview=FabricDataAgentToolParameters( project_connections=[ ToolProjectConnection(project_connection_id=os.environ["FABRIC_PROJECT_CONNECTION_ID"]) diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py index b4ae4813aef3..0ad945453783 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_image_generation_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py index 7b52cb2dcf10..dd08e302693d 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search.py @@ -42,7 +42,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( MemoryStoreDefaultDefinition, - MemorySearchTool, + MemorySearchPreviewTool, PromptAgentDefinition, MemoryStoreDefaultOptions, ) @@ -85,7 +85,7 @@ # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" - tool = MemorySearchTool( + tool = MemorySearchPreviewTool( memory_store_name=memory_store.name, scope=scope, update_delay=1, # Wait 1 second of inactivity before updating memories diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py index c9235085d171..4a54942a9f7a 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_memory_search_async.py @@ -42,7 +42,7 @@ from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( MemoryStoreDefaultDefinition, - MemorySearchTool, + MemorySearchPreviewTool, PromptAgentDefinition, MemoryStoreDefaultOptions, ) @@ -94,7 +94,7 @@ async def main() -> None: model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"], instructions="You are a helpful assistant that answers general questions", tools=[ - MemorySearchTool( + MemorySearchPreviewTool( memory_store_name=memory_store.name, scope=scope, update_delay=1, # Wait 1 second of inactivity before updating memories diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py 
b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py index a95740e2e53f..94f71a6b4634 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi.py @@ -6,7 +6,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with OpenAPI tool capabilities - using the OpenApiAgentTool and synchronous Azure AI Projects client. The agent can + using the OpenApiTool and synchronous Azure AI Projects client. The agent can call external APIs defined by OpenAPI specifications. USAGE: @@ -31,7 +31,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - OpenApiAgentTool, + OpenApiTool, OpenApiFunctionDefinition, OpenApiAnonymousAuthDetails, ) @@ -52,7 +52,7 @@ with open(weather_asset_file_path, "r") as f: openapi_weather = jsonref.loads(f.read()) - tool = OpenApiAgentTool( + tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="get_weather", spec=openapi_weather, diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py index 6aa541ef6b30..b1f4e2022321 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_openapi_with_project_connection.py @@ -6,7 +6,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with OpenAPI tool capabilities - using the OpenApiAgentTool with project connection authentication. The agent can + using the OpenApiTool with project connection authentication. The agent can call external APIs defined by OpenAPI specifications, using credentials stored in an Azure AI Project connection. 
@@ -34,7 +34,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - OpenApiAgentTool, + OpenApiTool, OpenApiFunctionDefinition, OpenApiProjectConnectionAuthDetails, OpenApiProjectConnectionSecurityScheme, @@ -58,7 +58,7 @@ with open(tripadvisor_asset_file_path, "r") as f: openapi_tripadvisor = jsonref.loads(f.read()) - tool = OpenApiAgentTool( + tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="tripadvisor", spec=openapi_tripadvisor, diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py index c41be0e489d2..4388b73f3f53 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_sharepoint.py @@ -7,7 +7,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with SharePoint capabilities - using the SharepointAgentTool and synchronous Azure AI Projects client. The agent can search + using the SharepointPreviewTool and synchronous Azure AI Projects client. The agent can search SharePoint content and provide responses with relevant information from SharePoint sites. 
USAGE: @@ -33,7 +33,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - SharepointAgentTool, + SharepointPreviewTool, SharepointGroundingToolParameters, ToolProjectConnection, ) @@ -48,7 +48,7 @@ project_client.get_openai_client() as openai_client, ): # [START tool_declaration] - tool = SharepointAgentTool( + tool = SharepointPreviewTool( sharepoint_grounding_preview=SharepointGroundingToolParameters( project_connections=[ ToolProjectConnection(project_connection_id=os.environ["SHAREPOINT_PROJECT_CONNECTION_ID"]) diff --git a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py index 7955f28c6777..b190090f9cf3 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py +++ b/sdk/ai/azure-ai-projects/samples/agents/tools/sample_agent_to_agent.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -6,7 +7,7 @@ """ DESCRIPTION: This sample demonstrates how to create an AI agent with Agent-to-Agent (A2A) capabilities - using the A2ATool and synchronous Azure AI Projects client. The agent can communicate + using the A2APreviewTool and synchronous Azure AI Projects client. The agent can communicate with other agents and provide responses based on inter-agent interactions using the A2A protocol (https://a2a-protocol.org/latest/). @@ -35,7 +36,7 @@ from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( PromptAgentDefinition, - A2ATool, + A2APreviewTool, ) load_dotenv() @@ -49,7 +50,7 @@ ): # [START tool_declaration] - tool = A2ATool( + tool = A2APreviewTool( project_connection_id=os.environ["A2A_PROJECT_CONNECTION_ID"], ) # If the connection is missing target, we need to set the A2A endpoint URL. 
diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py index e00d3439234e..93d2a41866e4 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced.py @@ -38,11 +38,10 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( + EasyInputMessage, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, - ResponsesUserMessageItemParam, - ResponsesAssistantMessageItemParam, ) load_dotenv() @@ -85,9 +84,7 @@ scope = "user_123" # Extract memories from messages and add them to the memory store - user_message = ResponsesUserMessageItemParam( - content="I prefer dark roast coffee and usually drink it in the morning" - ) + user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") update_poller = project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -97,7 +94,7 @@ print(f"Scheduled memory update operation (Update ID: {update_poller.update_id}, Status: {update_poller.status()})") # Extend the previous update with another update and more messages - new_message = ResponsesUserMessageItemParam(content="I also like cappuccinos in the afternoon") + new_message = EasyInputMessage(role="user", content="I also like cappuccinos in the afternoon") new_update_poller = project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -124,7 +121,7 @@ ) # Retrieve memories from the memory store - query_message = ResponsesUserMessageItemParam(content="What are my morning coffee preferences?") + query_message = EasyInputMessage(role="user", content="What are my morning coffee preferences?") search_response = project_client.memory_stores.search_memories( name=memory_store.name, 
scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) ) @@ -133,12 +130,8 @@ print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Perform another search using the previous search as context - agent_message = ResponsesAssistantMessageItemParam( - content="You previously indicated a preference for dark roast coffee in the morning." - ) - followup_query = ResponsesUserMessageItemParam( - content="What about afternoon?" # Follow-up assuming context from previous messages - ) + agent_message = EasyInputMessage(role="assistant", content="You previously indicated a preference for dark roast coffee in the morning.") + followup_query = EasyInputMessage(role="user", content="What about afternoon?") followup_search_response = project_client.memory_stores.search_memories( name=memory_store.name, scope=scope, diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py index 96c6ed838d78..0f607302208e 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_advanced_async.py @@ -39,11 +39,10 @@ from azure.identity.aio import DefaultAzureCredential from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( + EasyInputMessage, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, - ResponsesUserMessageItemParam, - ResponsesAssistantMessageItemParam, ) load_dotenv() @@ -89,9 +88,7 @@ async def main() -> None: scope = "user_123" # Extract memories from messages and add them to the memory store - user_message = ResponsesUserMessageItemParam( - content="I prefer dark roast coffee and usually drink it in the morning" - ) + user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") update_poller = await 
project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -103,7 +100,7 @@ async def main() -> None: ) # Extend the previous update with another update and more messages - new_message = ResponsesUserMessageItemParam(content="I also like cappuccinos in the afternoon") + new_message = EasyInputMessage(role="user", content="I also like cappuccinos in the afternoon") new_update_poller = await project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -130,7 +127,7 @@ async def main() -> None: ) # Retrieve memories from the memory store - query_message = ResponsesUserMessageItemParam(content="What are my morning coffee preferences?") + query_message = EasyInputMessage(role="user", content="What are my morning coffee preferences?") search_response = await project_client.memory_stores.search_memories( name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) ) @@ -139,12 +136,8 @@ async def main() -> None: print(f" - Memory ID: {memory.memory_item.memory_id}, Content: {memory.memory_item.content}") # Perform another search using the previous search as context - agent_message = ResponsesAssistantMessageItemParam( - content="You previously indicated a preference for dark roast coffee in the morning." - ) - followup_query = ResponsesUserMessageItemParam( - content="What about afternoon?" 
# Follow-up assuming context from previous messages - ) + agent_message = EasyInputMessage(role="assistant", content="You previously indicated a preference for dark roast coffee in the morning.") + followup_query = EasyInputMessage(role="user", content="What about afternoon?") followup_search_response = await project_client.memory_stores.search_memories( name=memory_store.name, scope=scope, diff --git a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py index 2374aca75c1a..3969e8ffb779 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic.py @@ -36,10 +36,10 @@ from azure.identity import DefaultAzureCredential from azure.ai.projects import AIProjectClient from azure.ai.projects.models import ( + EasyInputMessage, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, - ResponsesUserMessageItemParam, ) load_dotenv() @@ -81,10 +81,8 @@ # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" - # Add memories to the memory store - user_message = ResponsesUserMessageItemParam( - content="I prefer dark roast coffee and usually drink it in the morning" - ) + # Add a memory to the memory store + user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") update_poller = project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -101,7 +99,7 @@ ) # Retrieve memories from the memory store - query_message = ResponsesUserMessageItemParam(content="What are my coffee preferences?") + query_message = EasyInputMessage(role="user", content="What are my coffee preferences?") search_response = project_client.memory_stores.search_memories( name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) ) diff --git 
a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py index 579c086adc9a..6cf110c3a932 100644 --- a/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py +++ b/sdk/ai/azure-ai-projects/samples/memories/sample_memory_basic_async.py @@ -38,10 +38,10 @@ from azure.identity.aio import DefaultAzureCredential from azure.ai.projects.aio import AIProjectClient from azure.ai.projects.models import ( + EasyInputMessage, MemoryStoreDefaultDefinition, MemoryStoreDefaultOptions, MemorySearchOptions, - ResponsesUserMessageItemParam, ) load_dotenv() @@ -86,10 +86,8 @@ async def main() -> None: # You can also use "{{$userId}}" to take the oid of the request authentication header scope = "user_123" - # Add memories to the memory store - user_message = ResponsesUserMessageItemParam( - content="I prefer dark roast coffee and usually drink it in the morning" - ) + # Add a memory to the memory store + user_message = EasyInputMessage(role="user", content="I prefer dark roast coffee and usually drink it in the morning") update_poller = await project_client.memory_stores.begin_update_memories( name=memory_store.name, scope=scope, @@ -106,7 +104,7 @@ async def main() -> None: ) # Retrieve memories from the memory store - query_message = ResponsesUserMessageItemParam(content="What are my coffee preferences?") + query_message = EasyInputMessage(role="user", content="What are my coffee preferences?") search_response = await project_client.memory_stores.search_memories( name=memory_store.name, scope=scope, items=[query_message], options=MemorySearchOptions(max_memories=5) ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py index 18286a7ae0a6..fe1f1ae1c27b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py +++ 
b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor.py @@ -15,7 +15,7 @@ from azure.ai.projects.models import ( Reasoning, FunctionTool, - ResponseTextFormatConfigurationText, + # ResponseTextFormatConfigurationText, ) from devtools_testutils import ( recorded_by_proxy, @@ -516,7 +516,7 @@ def test_agent_with_structured_output_with_instructions_content_recording_enable assert True == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema with self.create_client(operation_group="tracing", **kwargs) as project_client: @@ -536,7 +536,7 @@ def test_agent_with_structured_output_with_instructions_content_recording_enable model=model, instructions="You are a helpful assistant that extracts person information.", text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, ) @@ -603,7 +603,7 @@ def test_agent_with_structured_output_with_instructions_content_recording_disabl assert False == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema with self.create_client(operation_group="agents", **kwargs) as project_client: @@ -622,7 +622,7 @@ def test_agent_with_structured_output_with_instructions_content_recording_disabl model=model, instructions="You are a helpful assistant that extracts person information.", text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, ) @@ -677,7 +677,7 @@ def 
test_agent_with_structured_output_without_instructions_content_recording_ena assert True == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema with self.create_client(operation_group="tracing", **kwargs) as project_client: @@ -695,7 +695,7 @@ def test_agent_with_structured_output_without_instructions_content_recording_ena model=model, # No instructions provided text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, ) @@ -759,7 +759,7 @@ def test_agent_with_structured_output_without_instructions_content_recording_dis assert False == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema with self.create_client(operation_group="agents", **kwargs) as project_client: @@ -777,7 +777,7 @@ def test_agent_with_structured_output_without_instructions_content_recording_dis model=model, # No instructions provided text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py index 0e486fd97ae1..39c878fd889c 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_ai_agents_instrumentor_async.py @@ -13,7 +13,7 @@ from azure.ai.projects.models import ( Reasoning, FunctionTool, - 
ResponseTextFormatConfigurationText, + # ResponseTextFormatConfigurationText, ) from devtools_testutils.aio import recorded_by_proxy_async @@ -394,7 +394,7 @@ async def test_agent_with_structured_output_with_instructions_content_recording_ assert True == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema project_client = self.create_async_client(operation_group="tracing", **kwargs) @@ -415,7 +415,7 @@ async def test_agent_with_structured_output_with_instructions_content_recording_ model=model, instructions="You are a helpful assistant that extracts person information.", text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, ) @@ -484,7 +484,7 @@ async def test_agent_with_structured_output_with_instructions_content_recording_ assert False == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema project_client = self.create_async_client(operation_group="agents", **kwargs) @@ -504,7 +504,7 @@ async def test_agent_with_structured_output_with_instructions_content_recording_ model=model, instructions="You are a helpful assistant that extracts person information.", text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="PersonInfo", schema=test_schema, ) @@ -561,7 +561,7 @@ async def test_agent_with_structured_output_without_instructions_content_recordi assert True == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from 
azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema project_client = self.create_async_client(operation_group="tracing", **kwargs) @@ -580,7 +580,7 @@ async def test_agent_with_structured_output_without_instructions_content_recordi model=model, # No instructions provided text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, ) @@ -644,7 +644,7 @@ async def test_agent_with_structured_output_without_instructions_content_recordi assert False == AIProjectInstrumentor().is_content_recording_enabled() assert True == AIProjectInstrumentor().is_instrumented() - from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema + from azure.ai.projects.models import TextResponseFormatJsonSchema project_client = self.create_async_client(operation_group="agents", **kwargs) @@ -663,7 +663,7 @@ async def test_agent_with_structured_output_without_instructions_content_recordi model=model, # No instructions provided text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="Result", schema=test_schema, ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py index ce090902ffb9..3a7fbdff92ac 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py +++ b/sdk/ai/azure-ai-projects/tests/agents/telemetry/test_responses_instrumentor.py @@ -4130,7 +4130,6 @@ def test_workflow_agent_streaming_with_content_recording(self, **kwargs): WorkflowAgentDefinition, AgentReference, PromptAgentDefinition, - ResponseStreamEventType, ) self.cleanup() @@ -4254,10 +4253,7 @@ def test_workflow_agent_streaming_with_content_recording(self, **kwargs): # Consume the stream and track 
workflow actions workflow_action_count = 0 for event in stream: - if ( - event.type == ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED - and event.item.type == "workflow_action" - ): + if event.type == "response.output_item.added" and event.item.type == "workflow_action": workflow_action_count += 1 # Verify we got workflow actions during streaming diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py index 0d0e187ff2a6..98f4ada367df 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py @@ -10,7 +10,7 @@ from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import ( PromptAgentDefinition, - ResponseTextFormatConfigurationJsonSchema, + TextResponseFormatJsonSchema, PromptAgentDefinitionText, ) @@ -176,9 +176,7 @@ class CalendarEvent(BaseModel): definition=PromptAgentDefinition( model=model, text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( - name="CalendarEvent", schema=CalendarEvent.model_json_schema() - ) + format=TextResponseFormatJsonSchema(name="CalendarEvent", schema=CalendarEvent.model_json_schema()) ), instructions=""" You are a helpful assistant that extracts calendar event information from the input user messages, diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py index 8b99aaa8cfcf..9a73410e6821 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud_async.py @@ -11,7 +11,7 @@ from devtools_testutils import RecordedTransport from azure.ai.projects.models import ( PromptAgentDefinition, - ResponseTextFormatConfigurationJsonSchema, + TextResponseFormatJsonSchema, 
PromptAgentDefinitionText, ) @@ -149,7 +149,7 @@ class CalendarEvent(BaseModel): definition=PromptAgentDefinition( model=model, text=PromptAgentDefinitionText( - format=ResponseTextFormatConfigurationJsonSchema( + format=TextResponseFormatJsonSchema( name="CalendarEvent", schema=CalendarEvent.model_json_schema() ) ), diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py index 35e9ee088cd8..95aff3fac063 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud.py @@ -7,14 +7,6 @@ from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport -from azure.ai.projects.models import ( - # ResponsesUserMessageItemParam, - # ResponsesSystemMessageItemParam, - # ItemContentInputText, - ItemType, - ResponsesMessageRole, - ItemContentType, -) class TestConversationItemsCrud(TestBase): @@ -69,16 +61,16 @@ def test_conversation_items_crud(self, **kwargs): assert len(item_list) == 2 self._validate_conversation_item( item_list[0], - expected_type=ItemType.MESSAGE, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_type="message", + expected_role="user", + expected_content_type="input_text", expected_content_text="first message", ) self._validate_conversation_item( item_list[1], - expected_type=ItemType.MESSAGE, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_type="message", + expected_role="user", + expected_content_type="input_text", expected_content_text="second message", ) item1_id = item_list[0].id @@ -99,15 +91,15 @@ def test_conversation_items_crud(self, **kwargs): # self._validate_conversation_item( # item_list[0], # expected_type=ItemType.MESSAGE, - # expected_role=ResponsesMessageRole.SYSTEM, - # 
expected_content_type=ItemContentType.INPUT_TEXT, + # expected_role="system", + # expected_content_type=InputContentType.INPUT_TEXT, # expected_content_text="third message", # ) # self._validate_conversation_item( # item_list[1], # expected_type=ItemType.MESSAGE, - # expected_role=ResponsesMessageRole.USER, - # expected_content_type=ItemContentType.INPUT_TEXT, + # expected_role="user", + # expected_content_type=InputContentType.INPUT_TEXT, # expected_content_text="fourth message", # ) # item3_id = item_list[0].id @@ -117,10 +109,10 @@ def test_conversation_items_crud(self, **kwargs): item = client.conversations.items.retrieve(conversation_id=conversation.id, item_id=item1_id) self._validate_conversation_item( item, - expected_type=ItemType.MESSAGE, + expected_type="message", expected_id=item1_id, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_role="user", + expected_content_type="input_text", expected_content_text="first message", ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py index 21359835d963..f6b0379aeab8 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_conversation_items_crud_async.py @@ -8,17 +8,8 @@ from test_base import TestBase, servicePreparer from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils import RecordedTransport -from azure.ai.projects.models import ( - # ResponsesUserMessageItemParam, - # ResponsesSystemMessageItemParam, - # ItemContentInputText, - ItemType, - ResponsesMessageRole, - ItemContentType, -) -# TODO: Emitter did not produce the output class OpenAI.ConversationResource. Validating service response as Dict for now. 
class TestConversationItemsCrudAsync(TestBase): @servicePreparer() @@ -49,16 +40,16 @@ async def test_conversation_items_crud_async(self, **kwargs): assert len(item_list) == 2 self._validate_conversation_item( item_list[0], - expected_type=ItemType.MESSAGE, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_type="message", + expected_role="user", + expected_content_type="input_text", expected_content_text="first message", ) self._validate_conversation_item( item_list[1], - expected_type=ItemType.MESSAGE, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_type="message", + expected_role="user", + expected_content_type="input_text", expected_content_text="second message", ) item1_id = item_list[0].id @@ -79,15 +70,15 @@ async def test_conversation_items_crud_async(self, **kwargs): # self._validate_conversation_item( # item_list[0], # expected_type=ItemType.MESSAGE, - # expected_role=ResponsesMessageRole.SYSTEM, - # expected_content_type=ItemContentType.INPUT_TEXT, + # expected_role="system", + # expected_content_type=InputContentType.INPUT_TEXT, # expected_content_text="third message", # ) # self._validate_conversation_item( # item_list[1], # expected_type=ItemType.MESSAGE, - # expected_role=ResponsesMessageRole.USER, - # expected_content_type=ItemContentType.INPUT_TEXT, + # expected_role="user", + # expected_content_type=InputContentType.INPUT_TEXT, # expected_content_text="fourth message", # ) # item3_id = item_list[0].id @@ -97,10 +88,10 @@ async def test_conversation_items_crud_async(self, **kwargs): item = await client.conversations.items.retrieve(conversation_id=conversation.id, item_id=item1_id) self._validate_conversation_item( item, - expected_type=ItemType.MESSAGE, + expected_type="message", expected_id=item1_id, - expected_role=ResponsesMessageRole.USER, - expected_content_type=ItemContentType.INPUT_TEXT, + expected_role="user", + 
expected_content_type="input_text", expected_content_text="first message", ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py index 4a2142d96150..2601424c4a1b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_code_interpreter_and_function.py @@ -15,7 +15,12 @@ import json from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport -from azure.ai.projects.models import PromptAgentDefinition, CodeInterpreterTool, CodeInterpreterToolAuto, FunctionTool +from azure.ai.projects.models import ( + PromptAgentDefinition, + CodeInterpreterTool, + CodeInterpreterContainerAuto, + FunctionTool, +) from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam @@ -62,7 +67,7 @@ def test_calculate_and_save(self, **kwargs): model=model, instructions="You are a calculator assistant. Use code interpreter to perform calculations, then ALWAYS save the result using the save_result function.", tools=[ - CodeInterpreterTool(container=CodeInterpreterToolAuto()), + CodeInterpreterTool(container=CodeInterpreterContainerAuto()), func_tool, ], ), @@ -124,7 +129,7 @@ def test_generate_data_and_report(self, **kwargs): model=model, instructions="You are a data analyst. 
Use code interpreter to generate and analyze data, then ALWAYS create a report using the generate_report function with the exact statistics you computed.", tools=[ - CodeInterpreterTool(container=CodeInterpreterToolAuto()), + CodeInterpreterTool(container=CodeInterpreterContainerAuto()), report_function, ], ), diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py index 608143619c28..a60340a6571b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_and_code_interpreter.py @@ -15,7 +15,12 @@ from io import BytesIO from test_base import TestBase, servicePreparer from devtools_testutils import recorded_by_proxy, RecordedTransport -from azure.ai.projects.models import PromptAgentDefinition, FileSearchTool, CodeInterpreterTool, CodeInterpreterToolAuto +from azure.ai.projects.models import ( + PromptAgentDefinition, + FileSearchTool, + CodeInterpreterTool, + CodeInterpreterContainerAuto, +) class TestAgentFileSearchAndCodeInterpreter(TestBase): @@ -83,7 +88,7 @@ def test_find_and_analyze_data(self, **kwargs): instructions="You are a data analyst. Use file search to find data files, then use code interpreter to perform calculations on the data.", tools=[ FileSearchTool(vector_store_ids=[vector_store.id]), - CodeInterpreterTool(container=CodeInterpreterToolAuto()), + CodeInterpreterTool(container=CodeInterpreterContainerAuto()), ], ), description="Agent with File Search and Code Interpreter.", @@ -153,7 +158,7 @@ def fibonacci(n): instructions="You are a code analyst. 
Use file search to find code files, then use code interpreter to execute and test the code.", tools=[ FileSearchTool(vector_store_ids=[vector_store.id]), - CodeInterpreterTool(container=CodeInterpreterToolAuto()), + CodeInterpreterTool(container=CodeInterpreterContainerAuto()), ], ), description="Agent for code analysis and execution.", diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py index 4edbedf51548..1b4bd4462be5 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/multitool/test_agent_file_search_code_interpreter_function.py @@ -20,7 +20,7 @@ PromptAgentDefinition, FileSearchTool, CodeInterpreterTool, - CodeInterpreterToolAuto, + CodeInterpreterContainerAuto, FunctionTool, ) from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam @@ -104,7 +104,7 @@ def test_complete_analysis_workflow(self, **kwargs): instructions="You are a data analyst. 
Use file search to find data files, code interpreter to calculate statistics, and ALWAYS save your analysis using the save_analysis function.", tools=[ FileSearchTool(vector_store_ids=[vector_store.id]), - CodeInterpreterTool(container=CodeInterpreterToolAuto()), + CodeInterpreterTool(container=CodeInterpreterContainerAuto()), func_tool, ], ), diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py index 75f16997451b..0668d960b8ad 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search.py @@ -10,7 +10,7 @@ from devtools_testutils import is_live_and_not_recording from azure.ai.projects.models import ( PromptAgentDefinition, - AzureAISearchAgentTool, + AzureAISearchTool, AzureAISearchToolResource, AISearchIndexResource, AzureAISearchQueryType, @@ -60,7 +60,7 @@ def test_agent_ai_search_question_answering(self, **kwargs): significantly faster (~3x) and provides the same coverage. See test_agent_ai_search_async.py::test_agent_ai_search_question_answering_async_parallel - This test verifies that an agent can be created with AzureAISearchAgentTool, + This test verifies that an agent can be created with AzureAISearchTool, use it to search indexed content, and provide accurate answers to questions based on the search results. @@ -112,7 +112,7 @@ def test_agent_ai_search_question_answering(self, **kwargs): Respond with only 'True' or 'False' based on what you find in the search results. 
If you cannot find clear evidence in the search results, answer 'False'.""", tools=[ - AzureAISearchAgentTool( + AzureAISearchTool( azure_ai_search=AzureAISearchToolResource( indexes=[ AISearchIndexResource( diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py index 2df569498d15..a6f25b30a756 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_ai_search_async.py @@ -11,7 +11,7 @@ from devtools_testutils import is_live_and_not_recording from azure.ai.projects.models import ( PromptAgentDefinition, - AzureAISearchAgentTool, + AzureAISearchTool, AzureAISearchToolResource, AISearchIndexResource, AzureAISearchQueryType, @@ -125,7 +125,7 @@ async def test_agent_ai_search_question_answering_async_parallel(self, **kwargs) """ Test agent with Azure AI Search capabilities for question answering using async (parallel). - This test verifies that an agent can be created with AzureAISearchAgentTool, + This test verifies that an agent can be created with AzureAISearchTool, and handle multiple concurrent requests to search indexed content and provide accurate answers to questions based on the search results. @@ -181,7 +181,7 @@ async def test_agent_ai_search_question_answering_async_parallel(self, **kwargs) Respond with only 'True' or 'False' based on what you find in the search results. 
If you cannot find clear evidence in the search results, answer 'False'.""", tools=[ - AzureAISearchAgentTool( + AzureAISearchTool( azure_ai_search=AzureAISearchToolResource( indexes=[ AISearchIndexResource( diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py index 23754454bab9..9881a1a4a6d0 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_bing_grounding.py @@ -10,7 +10,7 @@ from devtools_testutils import is_live_and_not_recording from azure.ai.projects.models import ( PromptAgentDefinition, - BingGroundingAgentTool, + BingGroundingTool, BingGroundingSearchToolParameters, BingGroundingSearchConfiguration, ) @@ -27,7 +27,7 @@ def test_agent_bing_grounding(self, **kwargs): """ Test agent with Bing grounding capabilities. - This test verifies that an agent can be created with BingGroundingAgentTool, + This test verifies that an agent can be created with BingGroundingTool, use it to search the web for current information, and provide responses with URL citations. 
@@ -69,7 +69,7 @@ def test_agent_bing_grounding(self, **kwargs): model=model, instructions="You are a helpful assistant.", tools=[ - BingGroundingAgentTool( + BingGroundingTool( bing_grounding=BingGroundingSearchToolParameters( search_configurations=[ BingGroundingSearchConfiguration(project_connection_id=bing_connection_id) @@ -165,7 +165,7 @@ def test_agent_bing_grounding_multiple_queries(self, **kwargs): model=model, instructions="You are a helpful assistant that provides current information.", tools=[ - BingGroundingAgentTool( + BingGroundingTool( bing_grounding=BingGroundingSearchToolParameters( search_configurations=[ BingGroundingSearchConfiguration(project_connection_id=bing_connection_id) diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py index 2798aecd66e3..13337934bfd4 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter.py @@ -12,7 +12,7 @@ from azure.ai.projects.models import ( PromptAgentDefinition, CodeInterpreterTool, - CodeInterpreterToolAuto, + CodeInterpreterContainerAuto, ) @@ -54,7 +54,7 @@ def test_agent_code_interpreter_simple_math(self, **kwargs): definition=PromptAgentDefinition( model=model, instructions="You are a helpful assistant that can execute Python code.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[]))], + tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[]))], ), description="Simple code interpreter agent for basic Python execution.", ) @@ -154,7 +154,7 @@ def test_agent_code_interpreter_file_generation(self, **kwargs): definition=PromptAgentDefinition( model=model, instructions="You are a helpful assistant that can analyze data and create visualizations.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[file.id]))], + 
tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[file.id]))], ), description="Code interpreter agent for file processing and chart generation.", ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py index 709947e12485..6932f282830b 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_code_interpreter_async.py @@ -11,7 +11,7 @@ from azure.ai.projects.models import ( PromptAgentDefinition, CodeInterpreterTool, - CodeInterpreterToolAuto, + CodeInterpreterContainerAuto, ) @@ -40,7 +40,7 @@ async def test_agent_code_interpreter_simple_math_async(self, **kwargs): definition=PromptAgentDefinition( model=model, instructions="You are a helpful assistant that can execute Python code.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[]))], + tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[]))], ), description="Simple code interpreter agent for basic Python execution.", ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py index 96ba06de9aa6..bef4555d04fb 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search.py @@ -12,7 +12,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects.models import ( MemoryStoreDefaultDefinition, - MemorySearchTool, + MemorySearchPreviewTool, PromptAgentDefinition, MemoryStoreDefaultOptions, ) @@ -28,7 +28,7 @@ def test_agent_memory_search(self, **kwargs): This test verifies that an agent can: 1. Create a memory store with chat and embedding models - 2. 
Use MemorySearchTool to store user preferences/information + 2. Use MemorySearchPreviewTool to store user preferences/information 3. Retrieve stored memories across different conversations 4. Answer questions based on previously stored context @@ -42,7 +42,7 @@ def test_agent_memory_search(self, **kwargs): POST /conversations openai_client.conversations.create() # Test focus: - POST /openai/responses openai_client.responses.create() (with MemorySearchTool) + POST /openai/responses openai_client.responses.create() (with MemorySearchPreviewTool) # Teardown: DELETE /conversations/{conversation_id} openai_client.conversations.delete() @@ -104,7 +104,7 @@ def test_agent_memory_search(self, **kwargs): assert memory_store.description == "Test memory store for agent conversations" # Create memory search tool - tool = MemorySearchTool( + tool = MemorySearchPreviewTool( memory_store_name=memory_store.name, scope=scope, update_delay=1, # Wait 1 second for testing; use higher value (300) in production diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py index b90e06311057..4b9e64c8ce02 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_memory_search_async.py @@ -13,7 +13,7 @@ from azure.core.exceptions import ResourceNotFoundError from azure.ai.projects.models import ( MemoryStoreDefaultDefinition, - MemorySearchTool, + MemorySearchPreviewTool, PromptAgentDefinition, MemoryStoreDefaultOptions, ) @@ -79,7 +79,7 @@ async def test_agent_memory_search_async(self, **kwargs): assert memory_store.description == "Test memory store for agent conversations" # Create memory search tool - tool = MemorySearchTool( + tool = MemorySearchPreviewTool( memory_store_name=memory_store.name, scope=scope, update_delay=1, # Wait 1 second for testing; use higher value (300) in 
production diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py index d46727a1ed06..4a862ff31b5a 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi.py @@ -13,7 +13,7 @@ from devtools_testutils import recorded_by_proxy, RecordedTransport from azure.ai.projects.models import ( PromptAgentDefinition, - OpenApiAgentTool, + OpenApiTool, OpenApiFunctionDefinition, OpenApiAnonymousAuthDetails, ) @@ -30,7 +30,7 @@ def test_agent_openapi(self, **kwargs): Test agent with OpenAPI tool capabilities. This test verifies that an agent can: - 1. Use OpenApiAgentTool to call external APIs defined by OpenAPI specifications + 1. Use OpenApiTool to call external APIs defined by OpenAPI specifications 2. Load and parse OpenAPI spec from JSON file 3. Make API calls and incorporate results into responses @@ -42,7 +42,7 @@ def test_agent_openapi(self, **kwargs): POST /agents/{agent_name}/versions project_client.agents.create_version() # Test focus: - POST /openai/responses openai_client.responses.create() (with OpenApiAgentTool) + POST /openai/responses openai_client.responses.create() (with OpenApiTool) # Teardown: DELETE /agents/{agent_name}/versions/{agent_version} project_client.agents.delete_version() @@ -66,7 +66,7 @@ def test_agent_openapi(self, **kwargs): openapi_weather = jsonref.loads(f.read()) # Create OpenAPI tool - tool = OpenApiAgentTool( + tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="get_weather", spec=openapi_weather, diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py index 2ade22530a0d..d56ea15d7c52 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_openapi_async.py @@ 
-14,7 +14,7 @@ from devtools_testutils import RecordedTransport from azure.ai.projects.models import ( PromptAgentDefinition, - OpenApiAgentTool, + OpenApiTool, OpenApiFunctionDefinition, OpenApiAnonymousAuthDetails, ) @@ -46,7 +46,7 @@ async def test_agent_openapi_async(self, **kwargs): openapi_weather = jsonref.loads(f.read()) # Create OpenAPI tool - tool = OpenApiAgentTool( + tool = OpenApiTool( openapi=OpenApiFunctionDefinition( name="get_weather", spec=openapi_weather, diff --git a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py index 0a8940c3c393..636e85370bb4 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py +++ b/sdk/ai/azure-ai-projects/tests/agents/tools/test_agent_tools_with_conversations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -17,7 +18,7 @@ FunctionTool, FileSearchTool, CodeInterpreterTool, - CodeInterpreterToolAuto, + CodeInterpreterContainerAuto, PromptAgentDefinition, ) from openai.types.responses.response_input_param import FunctionCallOutput, ResponseInputParam @@ -328,7 +329,7 @@ def test_code_interpreter_with_conversation(self, **kwargs): definition=PromptAgentDefinition( model=model, instructions="You are a data analysis assistant. 
Use Python to perform calculations.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[]))], + tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[]))], ), description="Code interpreter agent for conversation testing.", ) @@ -427,7 +428,7 @@ def test_code_interpreter_with_file_in_conversation(self, **kwargs): definition=PromptAgentDefinition( model=model, instructions="You are a helpful assistant.", - tools=[CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[uploaded_file.id]))], + tools=[CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[uploaded_file.id]))], ), description="Code interpreter agent for data analysis and visualization.", ) diff --git a/sdk/ai/azure-ai-projects/tests/conftest.py b/sdk/ai/azure-ai-projects/tests/conftest.py index 982d1b3ab7bb..8c35abe82c2b 100644 --- a/sdk/ai/azure-ai-projects/tests/conftest.py +++ b/sdk/ai/azure-ai-projects/tests/conftest.py @@ -124,15 +124,9 @@ def sanitize_url_paths(): # Sanitize Unix timestamps in eval names (from sample_redteam_evaluations.py) # Pattern 1: "Red Team Agent Safety Evaluation -" - add_general_regex_sanitizer( - regex=r"Evaluation -\d{10}", - value="Evaluation -SANITIZED-TS" - ) + add_general_regex_sanitizer(regex=r"Evaluation -\d{10}", value="Evaluation -SANITIZED-TS") # Pattern 2: "Eval Run for -" (agent name already sanitized) - add_general_regex_sanitizer( - regex=r"sanitized-agent-name -\d{10}", - value="sanitized-agent-name -SANITIZED-TS" - ) + add_general_regex_sanitizer(regex=r"sanitized-agent-name -\d{10}", value="sanitized-agent-name -SANITIZED-TS") # Sanitize API key from service response (this includes Application Insights connection string) add_body_key_sanitizer(json_path="credentials.key", value="sanitized-api-key") diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py index 67e8839f35bc..88dfd1c265b4 100644 --- 
a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py +++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py index 1778fc35317a..b609c15eaf2a 100644 --- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py +++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/ai/azure-ai-projects/tests/test_base.py b/sdk/ai/azure-ai-projects/tests/test_base.py index 895708f87aba..45945bc99c74 100644 --- a/sdk/ai/azure-ai-projects/tests/test_base.py +++ b/sdk/ai/azure-ai-projects/tests/test_base.py @@ -24,13 +24,13 @@ DeploymentType, Index, IndexType, - ItemContentType, + InputContentType, ItemResource, - ItemType, + InputItemType, ModelDeployment, - ResponsesMessageRole, ) from openai.types.responses import Response +from openai.types.conversations import ConversationItem from azure.ai.projects.models._models import AgentDetails, AgentVersionDetails from devtools_testutils import AzureRecordedTestCase, EnvironmentVariableLoader from azure.ai.projects import AIProjectClient as AIProjectClient @@ -542,17 +542,17 @@ def _validate_agent( def _validate_conversation_item( self, - item: ItemResource, + item: ConversationItem, *, - expected_type: Optional[ItemType] = None, + expected_type: Optional[str] = None, expected_id: Optional[str] = None, - expected_role: Optional[ResponsesMessageRole] = None, - expected_content_type: Optional[ItemContentType] = None, + expected_role: Optional[str] = None, + 
expected_content_type: Optional[InputContentType] = None, expected_content_text: Optional[str] = None, ) -> None: assert item - # From ItemResource: + # From ConversationItem: if expected_type: assert item.type == expected_type else: @@ -563,7 +563,7 @@ def _validate_conversation_item( assert item.id # From ResponsesMessageItemResource: - if expected_type == ItemType.MESSAGE: + if expected_type == "message": assert item.status == "completed" if expected_role: assert item.role == expected_role @@ -579,7 +579,7 @@ def _validate_conversation_item( if expected_content_text: assert item.content[0].text == expected_content_text print( - f"Conversation item validated (id: {item.id}, type: {item.type}, role: {item.role if item.type == ItemType.MESSAGE else 'N/A'})" + f"Conversation item validated (id: {item.id}, type: {item.type}, role: {item.role if item.type == 'message' else 'N/A'})" ) @classmethod diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml deleted file mode 100644 index 80b8dc2ae282..000000000000 --- a/sdk/ai/azure-ai-projects/tsp-location.yaml +++ /dev/null @@ -1,4 +0,0 @@ -directory: specification/ai/Azure.AI.Projects -commit: 78bfd335c31b8764578cfb9840f3b74349c10354 -repo: Azure/azure-rest-api-specs-pr -additionalDirectories: From d80e98e00794562736d999caf756fd2adca8142d Mon Sep 17 00:00:00 2001 From: Peter Wu <162184229+weirongw23-msft@users.noreply.github.com> Date: Mon, 26 Jan 2026 14:21:41 -0500 Subject: [PATCH 15/18] [Storage] STG 101 Features (#44320) --- sdk/storage/azure-storage-blob/CHANGELOG.md | 24 +- sdk/storage/azure-storage-blob/assets.json | 2 +- .../azure/apiview-properties.json | 1 + .../azure/storage/blob/_blob_client.py | 73 ++++-- .../azure/storage/blob/_blob_client.pyi | 10 +- .../storage/blob/_blob_client_helpers.py | 56 ++++- .../storage/blob/_blob_service_client.py | 12 +- .../storage/blob/_blob_service_client.pyi | 8 +- .../azure/storage/blob/_container_client.py | 4 +- 
.../blob/_generated/_azure_blob_storage.py | 9 +- .../storage/blob/_generated/_configuration.py | 13 +- .../blob/_generated/_utils/serialization.py | 17 +- .../_generated/aio/_azure_blob_storage.py | 9 +- .../blob/_generated/aio/_configuration.py | 13 +- .../aio/operations/_append_blob_operations.py | 27 ++- .../aio/operations/_blob_operations.py | 62 ++--- .../aio/operations/_block_blob_operations.py | 48 +++- .../aio/operations/_container_operations.py | 36 +-- .../aio/operations/_page_blob_operations.py | 37 ++- .../aio/operations/_service_operations.py | 16 +- .../blob/_generated/models/__init__.py | 2 + .../models/_azure_blob_storage_enums.py | 5 +- .../blob/_generated/models/_models_py3.py | 65 +++++- .../operations/_append_blob_operations.py | 56 +++-- .../_generated/operations/_blob_operations.py | 126 +++++----- .../operations/_block_blob_operations.py | 98 ++++++-- .../operations/_container_operations.py | 73 +++--- .../operations/_page_blob_operations.py | 78 +++++-- .../operations/_service_operations.py | 49 ++-- .../azure/storage/blob/_serialize.py | 1 + .../azure/storage/blob/_shared/models.py | 14 +- .../storage/blob/_shared/response_handlers.py | 1 + .../blob/_shared/shared_access_signature.py | 30 +++ .../storage/blob/_shared_access_signature.py | 96 +++++++- .../storage/blob/aio/_blob_client_async.py | 71 ++++-- .../storage/blob/aio/_blob_client_async.pyi | 10 +- .../blob/aio/_blob_service_client_async.py | 12 +- .../blob/aio/_blob_service_client_async.pyi | 8 +- .../blob/aio/_container_client_async.py | 4 +- sdk/storage/azure-storage-blob/setup.py | 4 +- .../azure-storage-blob/swagger/README.md | 2 +- .../tests/fake_credentials.py | 2 + .../tests/test_common_blob.py | 208 ++++++++++++----- .../tests/test_common_blob_async.py | 218 +++++++++++++----- .../azure-storage-blob/tests/test_cpk.py | 194 +++++++++++++++- .../tests/test_cpk_async.py | 195 +++++++++++++++- .../azure-storage-file-datalake/CHANGELOG.md | 16 +- 
.../filedatalake/_data_lake_file_client.py | 12 +- .../filedatalake/_data_lake_file_client.pyi | 4 +- .../_data_lake_file_client_helpers.py | 2 +- .../filedatalake/_data_lake_service_client.py | 4 + .../_data_lake_service_client.pyi | 8 +- .../_generated/_utils/serialization.py | 17 +- .../aio/operations/_path_operations.py | 8 +- .../_generated/operations/_path_operations.py | 16 +- .../azure/storage/filedatalake/_models.py | 1 + .../azure/storage/filedatalake/_serialize.py | 1 + .../storage/filedatalake/_shared/models.py | 14 +- .../filedatalake/_shared/response_handlers.py | 1 + .../_shared/shared_access_signature.py | 30 +++ .../filedatalake/_shared_access_signature.py | 34 ++- .../aio/_data_lake_file_client_async.py | 14 +- .../aio/_data_lake_file_client_async.pyi | 5 +- .../aio/_data_lake_service_client_async.py | 4 + .../aio/_data_lake_service_client_async.pyi | 8 +- .../azure-storage-file-datalake/setup.py | 4 +- .../tests/test_file.py | 57 +++++ .../tests/test_file_async.py | 57 +++++ .../tests/test_file_system.py | 29 ++- .../tests/test_file_system_async.py | 32 ++- .../azure-storage-file-share/CHANGELOG.md | 10 +- .../storage/fileshare/_directory_client.py | 14 +- .../azure/storage/fileshare/_file_client.py | 28 ++- .../azure/storage/fileshare/_file_client.pyi | 2 +- .../_generated/_azure_file_storage.py | 7 +- .../fileshare/_generated/_configuration.py | 11 +- .../_generated/_utils/serialization.py | 17 +- .../_generated/aio/_azure_file_storage.py | 7 +- .../_generated/aio/_configuration.py | 11 +- .../aio/operations/_directory_operations.py | 24 +- .../aio/operations/_file_operations.py | 52 ++--- .../aio/operations/_service_operations.py | 8 +- .../aio/operations/_share_operations.py | 38 +-- .../models/_azure_file_storage_enums.py | 7 + .../_generated/models/_models_py3.py | 19 +- .../operations/_directory_operations.py | 44 ++-- .../_generated/operations/_file_operations.py | 102 ++++---- .../operations/_service_operations.py | 23 +- 
.../operations/_share_operations.py | 72 +++--- .../azure/storage/fileshare/_serialize.py | 1 + .../azure/storage/fileshare/_share_client.py | 14 +- .../fileshare/_share_service_client.py | 22 +- .../fileshare/_share_service_client.pyi | 1 + .../azure/storage/fileshare/_shared/models.py | 14 +- .../fileshare/_shared/response_handlers.py | 1 + .../_shared/shared_access_signature.py | 30 +++ .../fileshare/_shared_access_signature.py | 4 + .../fileshare/aio/_directory_client_async.py | 14 +- .../fileshare/aio/_file_client_async.py | 27 ++- .../fileshare/aio/_file_client_async.pyi | 2 +- .../fileshare/aio/_share_client_async.py | 14 +- .../aio/_share_service_client_async.py | 22 +- .../aio/_share_service_client_async.pyi | 1 + sdk/storage/azure-storage-file-share/setup.py | 4 +- .../swagger/README.md | 2 +- .../tests/test_share.py | 21 +- .../tests/test_share_async.py | 21 +- sdk/storage/azure-storage-queue/CHANGELOG.md | 9 +- .../queue/_generated/_azure_queue_storage.py | 9 +- .../queue/_generated/_configuration.py | 13 +- .../queue/_generated/_utils/serialization.py | 17 +- .../_generated/aio/_azure_queue_storage.py | 9 +- .../queue/_generated/aio/_configuration.py | 13 +- .../aio/operations/_message_id_operations.py | 4 +- .../aio/operations/_messages_operations.py | 8 +- .../aio/operations/_queue_operations.py | 12 +- .../aio/operations/_service_operations.py | 10 +- .../queue/_generated/models/_models_py3.py | 19 +- .../operations/_message_id_operations.py | 10 +- .../operations/_messages_operations.py | 17 +- .../operations/_queue_operations.py | 27 +-- .../operations/_service_operations.py | 36 +-- .../azure/storage/queue/_queue_client.py | 5 +- .../storage/queue/_queue_service_client.py | 20 +- .../azure/storage/queue/_serialize.py | 1 + .../azure/storage/queue/_shared/models.py | 14 +- .../queue/_shared/response_handlers.py | 1 + .../queue/_shared/shared_access_signature.py | 30 +++ .../storage/queue/_shared_access_signature.py | 3 + 
.../storage/queue/aio/_queue_client_async.py | 5 +- .../queue/aio/_queue_service_client_async.py | 20 +- sdk/storage/azure-storage-queue/setup.py | 4 +- .../azure-storage-queue/swagger/README.md | 2 +- .../azure-storage-queue/tests/test_queue.py | 18 +- .../tests/test_queue_async.py | 18 +- 135 files changed, 2633 insertions(+), 909 deletions(-) diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md index 265973f181e7..a3836f9133c9 100644 --- a/sdk/storage/azure-storage-blob/CHANGELOG.md +++ b/sdk/storage/azure-storage-blob/CHANGELOG.md @@ -1,8 +1,28 @@ # Release History -## 12.29.0b1 (Unreleased) +## 12.29.0b1 (2026-01-27) + +### Features Added +- Added support for service version 2026-04-06. +- Added support for error code `INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED`. +This replaces `INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED` which has been deprecated. +- Added support for the keywords `access_tier_if_modified_since` and `access_tier_if_unmodified_since` to +conditionally perform `BlobClient.delete_blob` operation. +- Added support for the keyword `source_cpk` for `BlobClient`'s `upload_blob_from_url`, +`stage_block_from_url`, `upload_pages_from_url`, and `append_block_from_url` APIs +to re-encrypt data automatically by the service through a `CustomerProvidedEncryptionKey`. +- Added support for the keyword `user_delegation_tid` to `BlobServiceClient.get_user_delegation_key` API, which +can be used in `generate_blob_sas` and `generate_container_sas` to specify the Tenant ID that is authorized +to use the generated SAS URL. Note that `user_delegation_tid` must be used together with `user_delegation_oid`. +- Added support for the keyword `request_headers` to `generate_blob_sas` and `generate_container_sas`, +which specifies a set of headers and their corresponding values that must be +present in the request header when using the generated SAS. 
+- Added support for the keyword `request_query_params` to `generate_blob_sas` and `generate_container_sas`, +which specifies a set of query parameters and their corresponding values that must be +present in the request URL when using the generated SAS. -### Features Added +### Other Changes +- Bumped minimum `azure-core` dependency to 1.37.0. ## 12.28.0 (2026-01-06) diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json index d4d397249c45..f94cd70f1a2d 100644 --- a/sdk/storage/azure-storage-blob/assets.json +++ b/sdk/storage/azure-storage-blob/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-blob", - "Tag": "python/storage/azure-storage-blob_80e628b209" + "Tag": "python/storage/azure-storage-blob_16c5acad24" } diff --git a/sdk/storage/azure-storage-blob/azure/apiview-properties.json b/sdk/storage/azure-storage-blob/azure/apiview-properties.json index e37ee8ebfdd2..64ad80365c09 100644 --- a/sdk/storage/azure-storage-blob/azure/apiview-properties.json +++ b/sdk/storage/azure-storage-blob/azure/apiview-properties.json @@ -47,6 +47,7 @@ "azure.storage.blob.models.RetentionPolicy": null, "azure.storage.blob.models.SequenceNumberAccessConditions": null, "azure.storage.blob.models.SignedIdentifier": null, + "azure.storage.blob.models.SourceCpkInfo": null, "azure.storage.blob.models.SourceModifiedAccessConditions": null, "azure.storage.blob.models.StaticWebsite": null, "azure.storage.blob.models.StorageError": null, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 1e3f7ea3d6ca..54abb33c80a1 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -186,8 +186,7 @@ def __init__( self._raw_credential = credential if credential else 
sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureBlobStorage(self.url, get_api_version(kwargs), base_url=self.url, pipeline=self._pipeline) self._configure_encryption(kwargs) def __enter__(self) -> Self: @@ -427,6 +426,10 @@ def upload_blob_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. If a default @@ -450,8 +453,9 @@ def upload_blob_from_url( :return: Blob-updated property Dict (Etag and last modified) :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_from_url_options( source_url=source_url, metadata=metadata, @@ -473,7 +477,7 @@ def upload_blob( """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. 
- :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] + :type data: Union[bytes, str, Iterable[AnyStr], IO[bytes]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -934,6 +938,18 @@ def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> .. versionadded:: 12.4.0 + :keyword ~datetime.datetime access_tier_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the access-tier has been modified since the specified date/time. + :keyword ~datetime.datetime access_tier_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the access-tier has not been modified since the specified date/time. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
@@ -956,7 +972,8 @@ def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> snapshot=self.snapshot, version_id=get_version_id(self.version_id, kwargs), delete_snapshots=delete_snapshots, - **kwargs) + **kwargs + ) try: self._client.blob.delete(**options) except HttpResponseError as error: @@ -1976,7 +1993,7 @@ def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTie @distributed_trace def stage_block( self, block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -1986,8 +2003,10 @@ def stage_block( The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] - :param int length: Size of the block. + :type data: Union[bytes, Iterable[bytes], IO[bytes]] + :param int length: + Size of the block. Optional if the length of data can be determined. For Iterable and IO, if the + length is not provided and cannot be determined, all data will be read into memory. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage service checks the hash of the content that has arrived with the hash @@ -2071,6 +2090,10 @@ def stage_block_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. 
An encryption scope can be created using the Management API and referenced here by name. If a default @@ -2100,8 +2123,9 @@ def stage_block_from_url( :return: Blob property dict. :rtype: dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _stage_block_from_url_options( block_id=block_id, source_url=source_url, @@ -2989,6 +3013,10 @@ def upload_pages_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. 
If a default @@ -3020,8 +3048,9 @@ def upload_pages_from_url( """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_pages_from_url_options( source_url=source_url, offset=offset, @@ -3114,17 +3143,18 @@ def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, Union @distributed_trace def append_block( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: """Commits a new block of data to the end of the existing append blob. :param data: - Content of the block. This can be bytes, text, an iterable or a file-like object. - :type data: bytes or str or Iterable + Content of the block. + :type data: Union[bytes, Iterable[bytes], IO[bytes]] :param int length: - Size of the block in bytes. + Size of the block. Optional if the length of data can be determined. For Iterable and IO, if the + length is not provided and cannot be determined, all data will be read into memory. :keyword bool validate_content: If true, calculates an MD5 hash of the block content. The storage service checks the hash of the content that has arrived @@ -3290,6 +3320,10 @@ def append_block_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. 
+ Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. If a default @@ -3321,8 +3355,9 @@ def append_block_from_url( """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_from_url_options( copy_source_url=copy_source_url, source_offset=source_offset, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.pyi b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.pyi index 4b50f447948e..3f74b8b211ad 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.pyi +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.pyi @@ -155,6 +155,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): timeout: Optional[int] = None, content_settings: Optional[ContentSettings] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, standard_blob_tier: Optional[StandardBlobTier] = None, source_authorization: Optional[str] = None, @@ -290,6 +291,8 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): etag: Optional[str] = None, match_condition: Optional[MatchConditions] = None, if_tags_match_condition: Optional[str] = None, + access_tier_if_modified_since: Optional[datetime] = None, + access_tier_if_unmodified_since: Optional[datetime] = None, timeout: Optional[int] = None, **kwargs: Any ) -> 
None: ... @@ -480,7 +483,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): def stage_block( self, block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, *, validate_content: Optional[bool] = None, @@ -502,6 +505,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): *, lease: Optional[Union[BlobLeaseClient, str]] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, @@ -705,6 +709,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): match_condition: Optional[MatchConditions] = None, if_tags_match_condition: Optional[str] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, @@ -733,7 +738,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): @distributed_trace def append_block( self, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, *, validate_content: Optional[bool] = None, @@ -772,6 +777,7 @@ class BlobClient(StorageAccountHostsMixin, StorageEncryptionMixin): source_etag: Optional[str] = None, source_match_condition: Optional[MatchConditions] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py index 16e418835cd4..52a6a57ca5ed 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py @@ -24,7 +24,8 @@ DeleteSnapshotsOptionType, ModifiedAccessConditions, QueryRequest, - SequenceNumberAccessConditions + SequenceNumberAccessConditions, + SourceCpkInfo ) from ._models import ( BlobBlock, @@ -214,6 +215,14 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A if cpk: cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, encryption_algorithm=cpk.algorithm) + source_cpk = kwargs.pop('source_cpk', None) + source_cpk_info = None + if source_cpk: + source_cpk_info = SourceCpkInfo( + source_encryption_key=source_cpk.key_value, + source_encryption_key_sha256=source_cpk.key_hash, + source_encryption_algorithm=source_cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, @@ -230,6 +239,7 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A 'source_modified_access_conditions': get_source_conditions(kwargs), 'cpk_info': cpk_info, 'cpk_scope_info': get_cpk_scope_info(kwargs), + 'source_cpk_info': source_cpk_info, 'headers': headers, } options.update(kwargs) @@ -393,7 +403,8 @@ def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwarg 'snapshot': kwargs.pop('snapshot', None), # this is added for delete_blobs 'delete_snapshots': delete_snapshots or None, 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions} + 'modified_access_conditions': mod_conditions + } options.update(kwargs) return options @@ -757,6 +768,15 @@ def _stage_block_from_url_options( if cpk: cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, encryption_algorithm=cpk.algorithm) + source_cpk = kwargs.pop('source_cpk', 
None) + source_cpk_info = None + if source_cpk: + source_cpk_info = SourceCpkInfo( + source_encryption_key=source_cpk.key_value, + source_encryption_key_sha256=source_cpk.key_hash, + source_encryption_algorithm=source_cpk.algorithm + ) + options = { 'copy_source_authorization': source_authorization, 'file_request_intent': source_token_intent, @@ -769,6 +789,7 @@ def _stage_block_from_url_options( 'lease_access_conditions': access_conditions, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, + 'source_cpk_info': source_cpk_info, 'cls': return_response_headers, } options.update(kwargs) @@ -1040,6 +1061,14 @@ def _upload_pages_from_url_options( if cpk: cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, encryption_algorithm=cpk.algorithm) + source_cpk = kwargs.pop('source_cpk', None) + source_cpk_info = None + if source_cpk: + source_cpk_info = SourceCpkInfo( + source_encryption_key=source_cpk.key_value, + source_encryption_key_sha256=source_cpk.key_hash, + source_encryption_algorithm=source_cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, @@ -1056,7 +1085,9 @@ def _upload_pages_from_url_options( 'source_modified_access_conditions': source_mod_conditions, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, - 'cls': return_response_headers} + 'source_cpk_info': source_cpk_info, + 'cls': return_response_headers + } options.update(kwargs) return options @@ -1181,8 +1212,19 @@ def _append_block_from_url_options( cpk = kwargs.pop('cpk', None) cpk_info = None if cpk: - cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cpk.algorithm) + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + source_cpk = kwargs.pop('source_cpk', None) + source_cpk_info = None + if source_cpk: + source_cpk_info = SourceCpkInfo( + source_encryption_key=source_cpk.key_value, + 
source_encryption_key_sha256=source_cpk.key_hash, + source_encryption_algorithm=source_cpk.algorithm + ) options = { 'copy_source_authorization': source_authorization, @@ -1198,8 +1240,10 @@ def _append_block_from_url_options( 'source_modified_access_conditions': source_mod_conditions, 'cpk_scope_info': cpk_scope_info, 'cpk_info': cpk_info, + 'source_cpk_info': source_cpk_info, 'cls': return_response_headers, - 'timeout': kwargs.pop('timeout', None)} + 'timeout': kwargs.pop('timeout', None) + } options.update(kwargs) return options diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py index 81b5775ea24d..2333d9558d11 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py @@ -127,8 +127,7 @@ def __init__( _, sas_token = parse_query(parsed_url.query) self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureBlobStorage(self.url, get_api_version(kwargs), base_url=self.url, pipeline=self._pipeline) self._configure_encryption(kwargs) def __enter__(self) -> Self: @@ -227,6 +226,8 @@ def from_connection_string( def get_user_delegation_key( self, key_start_time: "datetime", key_expiry_time: "datetime", + *, + delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> "UserDelegationKey": """ @@ -237,6 +238,7 @@ def get_user_delegation_key( A DateTime value. Indicates when the key becomes valid. :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. 
+ :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -246,7 +248,11 @@ def get_user_delegation_key( :return: The user delegation key. :rtype: ~azure.storage.blob.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) + key_info = KeyInfo( + start=_to_utc_datetime(key_start_time), + expiry=_to_utc_datetime(key_expiry_time), + delegated_user_tid=delegated_user_tid + ) timeout = kwargs.pop('timeout', None) try: user_delegation_key = self._client.service.get_user_delegation_key(key_info=key_info, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.pyi b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.pyi index c3f1a6001bd8..526c2bfae18a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.pyi +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.pyi @@ -85,7 +85,13 @@ class BlobServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): ) -> Self: ... @distributed_trace def get_user_delegation_key( - self, key_start_time: datetime, key_expiry_time: datetime, *, timeout: Optional[int] = None, **kwargs: Any + self, + key_start_time: datetime, + key_expiry_time: datetime, + *, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any ) -> UserDelegationKey: ... @distributed_trace def get_account_information(self, **kwargs: Any) -> Dict[str, str]: ... 
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index e35e9ce9b0f4..0415b58cec0d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -167,9 +167,7 @@ def close(self) -> None: self._client.close() def _build_generated_client(self) -> AzureBlobStorage: - client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access - return client + return AzureBlobStorage(self.url, self._api_version, base_url=self.url, pipeline=self._pipeline) def _format_url(self, hostname): return _format_url( diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py index 9ab21c8abf79..482adac5cd92 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py @@ -45,17 +45,16 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. 
- :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any + self, url: str, version: str, base_url: str = "", **kwargs: Any ) -> None: - self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + self._config = AzureBlobStorageConfiguration(url=url, version=version, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py index 027308796bf1..21c76b55270d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Literal +from typing import Any from azure.core.pipeline import policies @@ -22,16 +22,15 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param version: Specifies the version of the operation to use for this request. Required. 
+ :type version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") - + def __init__(self, url: str, version: str, **kwargs: Any) -> None: if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url self.version = version diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_utils/serialization.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_utils/serialization.py index ff543ed937ff..6da830e0cf4a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_utils/serialization.py @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. 
""" # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py index d01344b66a32..f7a99bedac3a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py @@ -45,17 +45,16 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. 
- :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any + self, url: str, version: str, base_url: str = "", **kwargs: Any ) -> None: - self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + self._config = AzureBlobStorageConfiguration(url=url, version=version, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py index 59296d0c0fd4..2b70484605eb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Literal +from typing import Any from azure.core.pipeline import policies @@ -22,16 +22,15 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param version: Specifies the version of the operation to use for this request. Required. 
+ :type version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") - + def __init__(self, url: str, version: str, **kwargs: Any) -> None: if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url self.version = version diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py index 8e92343afda1..df0d342541fc 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -58,7 +58,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, content_length: int, timeout: Optional[int] = None, @@ -176,6 +176,7 @@ async def create( _request = build_create_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, blob_content_type=_blob_content_type, blob_content_encoding=_blob_content_encoding, @@ -200,7 +201,6 @@ async def create( immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, blob_type=blob_type, - version=self._config.version, headers=_headers, params=_params, ) @@ -246,7 +246,7 @@ async def create( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def append_block( + async def append_block( # pylint: disable=too-many-locals self, content_length: int, body: IO[bytes], @@ -357,6 +357,7 @@ async def append_block( _request = 
build_append_block_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, @@ -377,7 +378,6 @@ async def append_block( structured_content_length=structured_content_length, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -435,7 +435,7 @@ async def append_block( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def append_block_from_url( + async def append_block_from_url( # pylint: disable=too-many-locals self, source_url: str, content_length: int, @@ -453,6 +453,7 @@ async def append_block_from_url( append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Append Block operation commits a new block of data to the end of an existing append blob @@ -503,6 +504,8 @@ async def append_block_from_url( :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. 
+ :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -537,6 +540,9 @@ async def append_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -559,11 +565,16 @@ async def append_block_from_url( _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_append_block_from_url_request( url=self._config.url, source_url=source_url, content_length=content_length, + version=self._config.version, source_range=source_range, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, @@ -588,8 +599,10 @@ async def append_block_from_url( request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -704,6 +717,7 @@ async def seal( _request = build_seal_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, 
lease_id=_lease_id, @@ -713,7 +727,6 @@ async def seal( if_none_match=_if_none_match, append_position=_append_position, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py index 49b134b56312..5645f6b8cdb3 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -177,6 +177,7 @@ async def download( _request = build_download_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -194,7 +195,6 @@ async def download( if_none_match=_if_none_match, if_tags=_if_tags, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -482,6 +482,7 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -495,7 +496,6 @@ async def get_properties( if_none_match=_if_none_match, if_tags=_if_tags, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -615,6 +615,8 @@ async def delete( delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, request_id_parameter: Optional[str] = None, blob_delete_type: Literal["Permanent"] = "Permanent", + access_tier_if_modified_since: Optional[datetime.datetime] = None, + access_tier_if_unmodified_since: Optional[datetime.datetime] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any @@ -660,6 +662,12 @@ async def 
delete( permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and None. Default value is "Permanent". :type blob_delete_type: str + :param access_tier_if_modified_since: Specify this header value to operate only on a blob if + the access-tier has been modified since the specified date/time. Default value is None. + :type access_tier_if_modified_since: ~datetime.datetime + :param access_tier_if_unmodified_since: Specify this header value to operate only on a blob if + the access-tier has not been modified since the specified date/time. Default value is None. + :type access_tier_if_unmodified_since: ~datetime.datetime :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions :param modified_access_conditions: Parameter group. Default value is None. @@ -698,6 +706,7 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -710,7 +719,8 @@ async def delete( if_tags=_if_tags, request_id_parameter=request_id_parameter, blob_delete_type=blob_delete_type, - version=self._config.version, + access_tier_if_modified_since=access_tier_if_modified_since, + access_tier_if_unmodified_since=access_tier_if_unmodified_since, headers=_headers, params=_params, ) @@ -777,10 +787,10 @@ async def undelete( _request = build_undelete_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -858,11 +868,11 @@ async def set_expiry( _request = build_set_expiry_request( url=self._config.url, expiry_options=expiry_options, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, expires_on=expires_on, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) 
@@ -971,6 +981,7 @@ async def set_http_headers( _request = build_set_http_headers_request( url=self._config.url, + version=self._config.version, timeout=timeout, blob_cache_control=_blob_cache_control, blob_content_type=_blob_content_type, @@ -986,7 +997,6 @@ async def set_http_headers( blob_content_disposition=_blob_content_disposition, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1088,6 +1098,7 @@ async def set_immutability_policy( _request = build_set_immutability_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, if_unmodified_since=_if_unmodified_since, @@ -1096,7 +1107,6 @@ async def set_immutability_policy( snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1184,12 +1194,12 @@ async def delete_immutability_policy( _request = build_delete_immutability_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1275,12 +1285,12 @@ async def set_legal_hold( _request = build_set_legal_hold_request( url=self._config.url, legal_hold=legal_hold, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1398,6 +1408,7 @@ async def set_metadata( _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, @@ -1412,7 +1423,6 @@ async def set_metadata( if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1521,6 
+1531,7 @@ async def acquire_lease( _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -1532,7 +1543,6 @@ async def acquire_lease( request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1626,6 +1636,7 @@ async def release_lease( _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1635,7 +1646,6 @@ async def release_lease( request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1728,6 +1738,7 @@ async def renew_lease( _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1737,7 +1748,6 @@ async def renew_lease( request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1837,6 +1847,7 @@ async def change_lease( url=self._config.url, lease_id=lease_id, proposed_lease_id=proposed_lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1846,7 +1857,6 @@ async def change_lease( request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1945,6 +1955,7 @@ async def break_lease( _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, if_modified_since=_if_modified_since, @@ -1955,7 +1966,6 @@ async def break_lease( 
request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2074,6 +2084,7 @@ async def create_snapshot( _request = build_create_snapshot_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, encryption_key=_encryption_key, @@ -2088,7 +2099,6 @@ async def create_snapshot( lease_id=_lease_id, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2128,7 +2138,7 @@ async def create_snapshot( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def start_copy_from_url( + async def start_copy_from_url( # pylint: disable=too-many-locals self, copy_source: str, timeout: Optional[int] = None, @@ -2244,6 +2254,7 @@ async def start_copy_from_url( _request = build_start_copy_from_url_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, tier=tier, @@ -2265,7 +2276,6 @@ async def start_copy_from_url( immutability_policy_expiry=immutability_policy_expiry, immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, - version=self._config.version, headers=_headers, params=_params, ) @@ -2303,7 +2313,7 @@ async def start_copy_from_url( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def copy_from_url( + async def copy_from_url( # pylint: disable=too-many-locals self, copy_source: str, timeout: Optional[int] = None, @@ -2435,6 +2445,7 @@ async def copy_from_url( _request = build_copy_from_url_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, tier=tier, @@ -2459,7 +2470,6 @@ async def copy_from_url( copy_source_tags=copy_source_tags, file_request_intent=file_request_intent, x_ms_requires_sync=x_ms_requires_sync, - 
version=self._config.version, headers=_headers, params=_params, ) @@ -2557,12 +2567,12 @@ async def abort_copy_from_url( _request = build_abort_copy_from_url_request( url=self._config.url, copy_id=copy_id, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, comp=comp, copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, headers=_headers, params=_params, ) @@ -2671,6 +2681,7 @@ async def set_tier( _request = build_set_tier_request( url=self._config.url, tier=tier, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -2679,7 +2690,6 @@ async def set_tier( lease_id=_lease_id, if_tags=_if_tags, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2746,11 +2756,11 @@ async def get_account_info( _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2871,6 +2881,7 @@ async def query( _request = build_query_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, lease_id=_lease_id, @@ -2885,7 +2896,6 @@ async def query( request_id_parameter=request_id_parameter, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -3114,6 +3124,7 @@ async def get_tags( _request = build_get_tags_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, @@ -3125,7 +3136,6 @@ async def get_tags( if_match=_if_match, if_none_match=_if_none_match, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -3246,6 +3256,7 @@ async def set_tags( _request = build_set_tags_request( url=self._config.url, + 
version=self._config.version, timeout=timeout, version_id=version_id, transactional_content_md5=transactional_content_md5, @@ -3259,7 +3270,6 @@ async def set_tags( if_none_match=_if_none_match, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py index 7031e68bf8bb..6356da0264ec 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def upload( + async def upload( # pylint: disable=too-many-locals self, content_length: int, body: IO[bytes], @@ -209,6 +209,7 @@ async def upload( _request = build_upload_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, blob_content_type=_blob_content_type, @@ -239,7 +240,6 @@ async def upload( structured_content_length=structured_content_length, blob_type=blob_type, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -289,7 +289,7 @@ async def upload( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def put_blob_from_url( + async def put_blob_from_url( # pylint: disable=too-many-locals self, content_length: int, copy_source: str, @@ -310,6 +310,7 @@ async def put_blob_from_url( cpk_scope_info: Optional[_models.CpkScopeInfo] = None, modified_access_conditions: 
Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are @@ -380,6 +381,8 @@ async def put_blob_from_url( :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. + :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -419,6 +422,9 @@ async def put_blob_from_url( _source_if_match = None _source_if_none_match = None _source_if_tags = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control _blob_content_disposition = blob_http_headers.blob_content_disposition @@ -446,11 +452,16 @@ async def put_blob_from_url( _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_put_blob_from_url_request( url=self._config.url, content_length=content_length, copy_source=copy_source, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, blob_content_type=_blob_content_type, @@ 
-483,8 +494,10 @@ async def put_blob_from_url( copy_source_authorization=copy_source_authorization, copy_source_tags=copy_source_tags, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, blob_type=blob_type, - version=self._config.version, headers=_headers, params=_params, ) @@ -530,7 +543,7 @@ async def put_blob_from_url( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def stage_block( + async def stage_block( # pylint: disable=too-many-locals self, block_id: str, content_length: int, @@ -622,6 +635,7 @@ async def stage_block( url=self._config.url, block_id=block_id, content_length=content_length, + version=self._config.version, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, timeout=timeout, @@ -635,7 +649,6 @@ async def stage_block( structured_content_length=structured_content_length, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -685,7 +698,7 @@ async def stage_block( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def stage_block_from_url( + async def stage_block_from_url( # pylint: disable=too-many-locals self, block_id: str, content_length: int, @@ -701,6 +714,7 @@ async def stage_block_from_url( cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Stage Block operation creates a new block to be committed as part of a blob where the @@ -745,6 +759,8 @@ async def stage_block_from_url( :param 
source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. + :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -772,6 +788,9 @@ async def stage_block_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -785,12 +804,17 @@ async def stage_block_from_url( _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_stage_block_from_url_request( url=self._config.url, block_id=block_id, content_length=content_length, source_url=source_url, + version=self._config.version, source_range=source_range, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, @@ -807,8 +831,10 @@ async def stage_block_from_url( request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, 
comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -854,7 +880,7 @@ async def stage_block_from_url( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def commit_block_list( + async def commit_block_list( # pylint: disable=too-many-locals self, blocks: _models.BlockLookupList, timeout: Optional[int] = None, @@ -992,6 +1018,7 @@ async def commit_block_list( _request = build_commit_block_list_request( url=self._config.url, + version=self._config.version, timeout=timeout, blob_cache_control=_blob_cache_control, blob_content_type=_blob_content_type, @@ -1020,7 +1047,6 @@ async def commit_block_list( legal_hold=legal_hold, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1133,6 +1159,7 @@ async def get_block_list( _request = build_get_block_list_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, list_type=list_type, timeout=timeout, @@ -1140,7 +1167,6 @@ async def get_block_list( if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py index 22728c401369..09bb123a20af 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py @@ -133,6 +133,7 @@ async def create( _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, access=access, @@ -140,7 +141,6 @@ async def create( default_encryption_scope=_default_encryption_scope, 
prevent_encryption_scope_override=_prevent_encryption_scope_override, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -220,11 +220,11 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -332,13 +332,13 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, request_id_parameter=request_id_parameter, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -431,6 +431,7 @@ async def set_metadata( _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, metadata=metadata, @@ -438,7 +439,6 @@ async def set_metadata( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -519,12 +519,12 @@ async def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -636,6 +636,7 @@ async def set_access_policy( _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, access=access, @@ -645,7 +646,6 @@ async def set_access_policy( restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -727,13 +727,13 @@ async def restore( _request = build_restore_request( 
url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, deleted_container_name=deleted_container_name, deleted_container_version=deleted_container_version, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -813,12 +813,12 @@ async def rename( _request = build_rename_request( url=self._config.url, source_container_name=source_container_name, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, source_lease_id=source_lease_id, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -901,12 +901,12 @@ async def submit_batch( _request = build_submit_batch_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, multipart_content_type=multipart_content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1009,6 +1009,7 @@ async def filter_blobs( _request = build_filter_blobs_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, where=where, @@ -1017,7 +1018,6 @@ async def filter_blobs( include=include, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1113,6 +1113,7 @@ async def acquire_lease( _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -1122,7 +1123,6 @@ async def acquire_lease( comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1211,6 +1211,7 @@ async def release_lease( _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, 
if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1218,7 +1219,6 @@ async def release_lease( comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1306,6 +1306,7 @@ async def renew_lease( _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1313,7 +1314,6 @@ async def renew_lease( comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1407,6 +1407,7 @@ async def break_lease( _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, if_modified_since=_if_modified_since, @@ -1415,7 +1416,6 @@ async def break_lease( comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1510,6 +1510,7 @@ async def change_lease( url=self._config.url, lease_id=lease_id, proposed_lease_id=proposed_lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -1517,7 +1518,6 @@ async def change_lease( comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1620,6 +1620,7 @@ async def list_blob_flat_segment( _request = build_list_blob_flat_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -1629,7 +1630,6 @@ async def list_blob_flat_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1741,6 +1741,7 @@ async def list_blob_hierarchy_segment( _request = build_list_blob_hierarchy_segment_request( url=self._config.url, delimiter=delimiter, + 
version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -1750,7 +1751,6 @@ async def list_blob_hierarchy_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1823,11 +1823,11 @@ async def get_account_info( _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py index d656ece5daa2..639ca92eade7 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -63,7 +63,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, content_length: int, blob_content_length: int, @@ -195,6 +195,7 @@ async def create( url=self._config.url, content_length=content_length, blob_content_length=blob_content_length, + version=self._config.version, timeout=timeout, tier=tier, blob_content_type=_blob_content_type, @@ -221,7 +222,6 @@ async def create( immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, blob_type=blob_type, - version=self._config.version, headers=_headers, params=_params, ) @@ -267,7 +267,7 @@ async def create( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def upload_pages( + async def upload_pages( # pylint: 
disable=too-many-locals self, content_length: int, body: IO[bytes], @@ -384,6 +384,7 @@ async def upload_pages( _request = build_upload_pages_request( url=self._config.url, content_length=content_length, + version=self._config.version, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, timeout=timeout, @@ -407,7 +408,6 @@ async def upload_pages( comp=comp, page_write=page_write, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -557,6 +557,7 @@ async def clear_pages( _request = build_clear_pages_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, range=range, lease_id=_lease_id, @@ -575,7 +576,6 @@ async def clear_pages( request_id_parameter=request_id_parameter, comp=comp, page_write=page_write, - version=self._config.version, headers=_headers, params=_params, ) @@ -617,7 +617,7 @@ async def clear_pages( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def upload_pages_from_url( + async def upload_pages_from_url( # pylint: disable=too-many-locals self, source_url: str, source_range: str, @@ -635,6 +635,7 @@ async def upload_pages_from_url( sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Upload Pages operation writes a range of pages to a page blob where the contents are read @@ -684,6 +685,8 @@ async def upload_pages_from_url( :param source_modified_access_conditions: Parameter group. Default value is None. 
:type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. + :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -720,6 +723,9 @@ async def upload_pages_from_url( _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -745,6 +751,10 @@ async def upload_pages_from_url( _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_upload_pages_from_url_request( url=self._config.url, @@ -752,6 +762,7 @@ async def upload_pages_from_url( source_range=source_range, content_length=content_length, range=range, + version=self._config.version, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, timeout=timeout, @@ -775,9 +786,11 @@ async def upload_pages_from_url( request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, comp=comp, page_write=page_write, - 
version=self._config.version, headers=_headers, params=_params, ) @@ -910,6 +923,7 @@ async def get_page_ranges( _request = build_get_page_ranges_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, range=range, @@ -923,7 +937,6 @@ async def get_page_ranges( marker=marker, maxresults=maxresults, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1064,6 +1077,7 @@ async def get_page_ranges_diff( _request = build_get_page_ranges_diff_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, prevsnapshot=prevsnapshot, @@ -1079,7 +1093,6 @@ async def get_page_ranges_diff( marker=marker, maxresults=maxresults, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1200,6 +1213,7 @@ async def resize( _request = build_resize_request( url=self._config.url, blob_content_length=blob_content_length, + version=self._config.version, timeout=timeout, lease_id=_lease_id, encryption_key=_encryption_key, @@ -1213,7 +1227,6 @@ async def resize( if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1321,6 +1334,7 @@ async def update_sequence_number( _request = build_update_sequence_number_request( url=self._config.url, sequence_number_action=sequence_number_action, + version=self._config.version, timeout=timeout, lease_id=_lease_id, if_modified_since=_if_modified_since, @@ -1331,7 +1345,6 @@ async def update_sequence_number( blob_sequence_number=blob_sequence_number, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1432,6 +1445,7 @@ async def copy_incremental( _request = build_copy_incremental_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, 
@@ -1440,7 +1454,6 @@ async def copy_incremental( if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py index c122e3eea681..b0cc80ff0561 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py @@ -108,12 +108,12 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -182,11 +182,11 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -259,11 +259,11 @@ async def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -362,6 +362,7 @@ async def list_containers_segment( _request = build_list_containers_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -369,7 +370,6 @@ async def list_containers_segment( timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) 
@@ -450,12 +450,12 @@ async def get_user_delegation_key( _request = build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -528,11 +528,11 @@ async def get_account_info( _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -617,11 +617,11 @@ async def submit_batch( _request = build_submit_batch_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, multipart_content_type=multipart_content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -724,6 +724,7 @@ async def filter_blobs( _request = build_filter_blobs_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, where=where, @@ -731,7 +732,6 @@ async def filter_blobs( maxresults=maxresults, include=include, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py index 8bc56b312fd4..95e38c268f1b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py @@ -60,6 +60,7 @@ RetentionPolicy, SequenceNumberAccessConditions, SignedIdentifier, + SourceCpkInfo, SourceModifiedAccessConditions, StaticWebsite, StorageError, @@ -149,6 +150,7 @@ "RetentionPolicy", 
"SequenceNumberAccessConditions", "SignedIdentifier", + "SourceCpkInfo", "SourceModifiedAccessConditions", "StaticWebsite", "StorageError", diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py index 43dbfa6bc654..471d5924df20 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -277,6 +277,9 @@ class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): STANDARD_RAGRS = "Standard_RAGRS" STANDARD_ZRS = "Standard_ZRS" PREMIUM_LRS = "Premium_LRS" + STANDARD_GZRS = "Standard_GZRS" + PREMIUM_ZRS = "Premium_ZRS" + STANDARD_RAGZRS = "Standard_RAGZRS" class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -344,7 +347,7 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" - INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierSnapshotNotAllowed" INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py index 3c353de607d5..e3cb9c5b99eb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py @@ -1697,6 
+1697,8 @@ class KeyInfo(_serialization.Model): :vartype start: str :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. :vartype expiry: str + :ivar delegated_user_tid: The delegated user tenant id in Azure AD. + :vartype delegated_user_tid: str """ _validation = { @@ -1707,18 +1709,22 @@ class KeyInfo(_serialization.Model): _attribute_map = { "start": {"key": "Start", "type": "str"}, "expiry": {"key": "Expiry", "type": "str"}, + "delegated_user_tid": {"key": "DelegatedUserTid", "type": "str"}, } - def __init__(self, *, start: str, expiry: str, **kwargs: Any) -> None: + def __init__(self, *, start: str, expiry: str, delegated_user_tid: Optional[str] = None, **kwargs: Any) -> None: """ :keyword start: The date-time the key is active in ISO 8601 UTC time. Required. :paramtype start: str :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. :paramtype expiry: str + :keyword delegated_user_tid: The delegated user tenant id in Azure AD. + :paramtype delegated_user_tid: str """ super().__init__(**kwargs) self.start = start self.expiry = expiry + self.delegated_user_tid = delegated_user_tid class LeaseAccessConditions(_serialization.Model): @@ -2507,6 +2513,54 @@ def __init__( self.access_policy = access_policy +class SourceCpkInfo(_serialization.Model): + """Parameter group. + + :ivar source_encryption_key: Optional. Specifies the source encryption key to use to encrypt + the source data provided in the request. + :vartype source_encryption_key: str + :ivar source_encryption_key_sha256: The SHA-256 hash of the provided source encryption key. + Must be provided if the x-ms-source-encryption-key header is provided. + :vartype source_encryption_key_sha256: str + :ivar source_encryption_algorithm: The algorithm used to produce the source encryption key + hash. Currently, the only accepted value is "AES256". Must be provided if the + x-ms-source-encryption-key is provided. Known values are: "None" and "AES256". 
+ :vartype source_encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + + _attribute_map = { + "source_encryption_key": {"key": "sourceEncryptionKey", "type": "str"}, + "source_encryption_key_sha256": {"key": "sourceEncryptionKeySha256", "type": "str"}, + "source_encryption_algorithm": {"key": "sourceEncryptionAlgorithm", "type": "str"}, + } + + def __init__( + self, + *, + source_encryption_key: Optional[str] = None, + source_encryption_key_sha256: Optional[str] = None, + source_encryption_algorithm: Optional[Union[str, "_models.EncryptionAlgorithmType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword source_encryption_key: Optional. Specifies the source encryption key to use to encrypt + the source data provided in the request. + :paramtype source_encryption_key: str + :keyword source_encryption_key_sha256: The SHA-256 hash of the provided source encryption key. + Must be provided if the x-ms-source-encryption-key header is provided. + :paramtype source_encryption_key_sha256: str + :keyword source_encryption_algorithm: The algorithm used to produce the source encryption key + hash. Currently, the only accepted value is "AES256". Must be provided if the + x-ms-source-encryption-key is provided. Known values are: "None" and "AES256". + :paramtype source_encryption_algorithm: str or + ~azure.storage.blob.models.EncryptionAlgorithmType + """ + super().__init__(**kwargs) + self.source_encryption_key = source_encryption_key + self.source_encryption_key_sha256 = source_encryption_key_sha256 + self.source_encryption_algorithm = source_encryption_algorithm + + class SourceModifiedAccessConditions(_serialization.Model): """Parameter group. @@ -2780,6 +2834,9 @@ class UserDelegationKey(_serialization.Model): :vartype signed_service: str :ivar signed_version: The service version that created the key. Required. :vartype signed_version: str + :ivar signed_delegated_user_tid: The delegated user tenant id in Azure AD. 
Return if + DelegatedUserTid is specified. + :vartype signed_delegated_user_tid: str :ivar value: The key as a base64 string. Required. :vartype value: str """ @@ -2801,6 +2858,7 @@ class UserDelegationKey(_serialization.Model): "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, "signed_service": {"key": "SignedService", "type": "str"}, "signed_version": {"key": "SignedVersion", "type": "str"}, + "signed_delegated_user_tid": {"key": "SignedDelegatedUserTid", "type": "str"}, "value": {"key": "Value", "type": "str"}, } @@ -2814,6 +2872,7 @@ def __init__( signed_service: str, signed_version: str, value: str, + signed_delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -2830,6 +2889,9 @@ def __init__( :paramtype signed_service: str :keyword signed_version: The service version that created the key. Required. :paramtype signed_version: str + :keyword signed_delegated_user_tid: The delegated user tenant id in Azure AD. Return if + DelegatedUserTid is specified. + :paramtype signed_delegated_user_tid: str :keyword value: The key as a base64 string. Required. 
:paramtype value: str """ @@ -2840,4 +2902,5 @@ def __init__( self.signed_expiry = signed_expiry self.signed_service = signed_service self.signed_version = signed_version + self.signed_delegated_user_tid = signed_delegated_user_tid self.value = value diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py index 3c6045a31599..70a3ddb8178f 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py @@ -35,10 +35,11 @@ _SERIALIZER.client_side_validation = False -def build_create_request( +def build_create_request( # pylint: disable=too-many-locals url: str, *, content_length: int, + version: str, timeout: Optional[int] = None, blob_content_type: Optional[str] = None, blob_content_encoding: Optional[str] = None, @@ -68,7 +69,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -148,11 +148,12 @@ def build_create_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_append_block_request( +def build_append_block_request( # pylint: disable=too-many-locals url: str, *, content_length: int, content: IO[bytes], + version: str, timeout: Optional[int] = None, transactional_content_md5: Optional[bytes] = None, transactional_content_crc64: Optional[bytes] = None, @@ -178,7 +179,6 @@ def build_append_block_request( comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -246,11 +246,12 @@ def build_append_block_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) -def build_append_block_from_url_request( +def build_append_block_from_url_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, source_url: str, content_length: int, + version: str, source_range: Optional[str] = None, source_content_md5: Optional[bytes] = None, source_contentcrc64: Optional[bytes] = None, @@ -275,13 +276,15 @@ def build_append_block_from_url_request( request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, + source_encryption_key: Optional[str] = None, + source_encryption_key_sha256: Optional[str] = None, + source_encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -359,6 +362,18 @@ def build_append_block_from_url_request( ) if file_request_intent is not None: _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") + if source_encryption_key is not None: + _headers["x-ms-source-encryption-key"] = _SERIALIZER.header( + "source_encryption_key", source_encryption_key, "str" + ) + if 
source_encryption_key_sha256 is not None: + _headers["x-ms-source-encryption-key-sha256"] = _SERIALIZER.header( + "source_encryption_key_sha256", source_encryption_key_sha256, "str" + ) + if source_encryption_algorithm is not None: + _headers["x-ms-source-encryption-algorithm"] = _SERIALIZER.header( + "source_encryption_algorithm", source_encryption_algorithm, "str" + ) _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -367,6 +382,7 @@ def build_append_block_from_url_request( def build_seal_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, lease_id: Optional[str] = None, @@ -381,7 +397,6 @@ def build_seal_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -438,7 +453,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, timeout: Optional[int] = None, @@ -556,6 +571,7 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, blob_content_type=_blob_content_type, blob_content_encoding=_blob_content_encoding, @@ -580,7 +596,6 @@ def create( # pylint: disable=inconsistent-return-statements immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, blob_type=blob_type, - version=self._config.version, 
headers=_headers, params=_params, ) @@ -626,7 +641,7 @@ def create( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def append_block( # pylint: disable=inconsistent-return-statements + def append_block( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, body: IO[bytes], @@ -737,6 +752,7 @@ def append_block( # pylint: disable=inconsistent-return-statements _request = build_append_block_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, @@ -757,7 +773,6 @@ def append_block( # pylint: disable=inconsistent-return-statements structured_content_length=structured_content_length, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -815,7 +830,7 @@ def append_block( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def append_block_from_url( # pylint: disable=inconsistent-return-statements + def append_block_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, source_url: str, content_length: int, @@ -833,6 +848,7 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Append Block operation commits a new block of data to the end of an existing append blob @@ -883,6 +899,8 @@ def append_block_from_url( # pylint: 
disable=inconsistent-return-statements :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. + :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -917,6 +935,9 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -939,11 +960,16 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_append_block_from_url_request( url=self._config.url, source_url=source_url, content_length=content_length, + version=self._config.version, source_range=source_range, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, @@ -968,8 +994,10 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + 
source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1084,6 +1112,7 @@ def seal( # pylint: disable=inconsistent-return-statements _request = build_seal_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, lease_id=_lease_id, @@ -1093,7 +1122,6 @@ def seal( # pylint: disable=inconsistent-return-statements if_none_match=_if_none_match, append_position=_append_position, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 79fe82c7b312..cb8e18ed1e0d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -40,6 +40,7 @@ def build_download_request( url: str, *, + version: str, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, @@ -62,7 +63,6 @@ def build_download_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -125,6 +125,7 @@ def build_download_request( def build_get_properties_request( url: str, *, + version: str, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, @@ -143,7 +144,6 @@ def build_get_properties_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -194,6 +194,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, @@ -206,12 +207,13 @@ def build_delete_request( if_tags: Optional[str] = None, request_id_parameter: Optional[str] = None, blob_delete_type: Literal["Permanent"] = "Permanent", + access_tier_if_modified_since: Optional[datetime.datetime] = None, + access_tier_if_unmodified_since: Optional[datetime.datetime] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -250,19 +252,26 @@ def build_delete_request( _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") if request_id_parameter is not None: _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") + if access_tier_if_modified_since is not None: + _headers["x-ms-access-tier-if-modified-since"] = _SERIALIZER.header( + "access_tier_if_modified_since", access_tier_if_modified_since, "rfc-1123" + ) + if access_tier_if_unmodified_since is not None: + _headers["x-ms-access-tier-if-unmodified-since"] = _SERIALIZER.header( + "access_tier_if_unmodified_since", access_tier_if_unmodified_since, "rfc-1123" + ) _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_undelete_request( - url: str, *, timeout: Optional[int] = None, 
request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -291,6 +300,7 @@ def build_set_expiry_request( url: str, *, expiry_options: Union[str, _models.BlobExpiryOptions], + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, expires_on: Optional[str] = None, @@ -300,7 +310,6 @@ def build_set_expiry_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -331,6 +340,7 @@ def build_set_expiry_request( def build_set_http_headers_request( url: str, *, + version: str, timeout: Optional[int] = None, blob_cache_control: Optional[str] = None, blob_content_type: Optional[str] = None, @@ -351,7 +361,6 @@ def build_set_http_headers_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -409,6 +418,7 @@ def build_set_http_headers_request( def build_set_immutability_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, if_unmodified_since: 
Optional[datetime.datetime] = None, @@ -422,7 +432,6 @@ def build_set_immutability_policy_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -464,6 +473,7 @@ def build_set_immutability_policy_request( def build_delete_immutability_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, snapshot: Optional[str] = None, @@ -474,7 +484,6 @@ def build_delete_immutability_policy_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -507,6 +516,7 @@ def build_set_legal_hold_request( url: str, *, legal_hold: bool, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, snapshot: Optional[str] = None, @@ -517,7 +527,6 @@ def build_set_legal_hold_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -550,6 +559,7 @@ def build_set_legal_hold_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, lease_id: Optional[str] = None, @@ -569,7 +579,6 @@ def build_set_metadata_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) 
comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -621,6 +630,7 @@ def build_set_metadata_request( def build_acquire_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, @@ -637,7 +647,6 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -681,6 +690,7 @@ def build_release_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -695,7 +705,6 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -736,6 +745,7 @@ def build_renew_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -750,7 +760,6 @@ def build_renew_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2026-02-06"] = kwargs.pop("version", 
_headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -792,6 +801,7 @@ def build_change_lease_request( *, lease_id: str, proposed_lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -806,7 +816,6 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -847,6 +856,7 @@ def build_change_lease_request( def build_break_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, break_period: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, @@ -862,7 +872,6 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -903,6 +912,7 @@ def build_break_lease_request( def build_create_snapshot_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, encryption_key: Optional[str] = None, @@ -922,7 +932,6 @@ def build_create_snapshot_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -971,10 +980,11 @@ def 
build_create_snapshot_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_start_copy_from_url_request( +def build_start_copy_from_url_request( # pylint: disable=too-many-locals url: str, *, copy_source: str, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, tier: Optional[Union[str, _models.AccessTierOptional]] = None, @@ -1001,7 +1011,6 @@ def build_start_copy_from_url_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1072,10 +1081,11 @@ def build_start_copy_from_url_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_copy_from_url_request( +def build_copy_from_url_request( # pylint: disable=too-many-locals url: str, *, copy_source: str, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, tier: Optional[Union[str, _models.AccessTierOptional]] = None, @@ -1105,7 +1115,6 @@ def build_copy_from_url_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1187,6 +1196,7 @@ def build_abort_copy_from_url_request( url: str, *, copy_id: str, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -1199,7 +1209,6 @@ def build_abort_copy_from_url_request( copy_action_abort_constant: Literal["abort"] = kwargs.pop( "copy_action_abort_constant", 
_headers.pop("x-ms-copy-action", "abort") ) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1232,6 +1241,7 @@ def build_set_tier_request( url: str, *, tier: Union[str, _models.AccessTierRequired], + version: str, snapshot: Optional[str] = None, version_id: Optional[str] = None, timeout: Optional[int] = None, @@ -1245,7 +1255,6 @@ def build_set_tier_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1282,14 +1291,13 @@ def build_set_tier_request( def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1318,6 +1326,7 @@ def build_get_account_info_request( def build_query_request( url: str, *, + version: str, snapshot: Optional[str] = None, timeout: Optional[int] = None, lease_id: Optional[str] = None, @@ -1338,7 +1347,6 @@ def build_query_request( comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", 
None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1390,6 +1398,7 @@ def build_query_request( def build_get_tags_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, snapshot: Optional[str] = None, @@ -1406,7 +1415,6 @@ def build_get_tags_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1452,6 +1460,7 @@ def build_get_tags_request( def build_set_tags_request( url: str, *, + version: str, timeout: Optional[int] = None, version_id: Optional[str] = None, transactional_content_md5: Optional[bytes] = None, @@ -1471,7 +1480,6 @@ def build_set_tags_request( comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1639,6 +1647,7 @@ def download( _request = build_download_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -1656,7 +1665,6 @@ def download( if_none_match=_if_none_match, if_tags=_if_tags, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -1944,6 +1952,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -1957,7 +1966,6 @@ def 
get_properties( # pylint: disable=inconsistent-return-statements if_none_match=_if_none_match, if_tags=_if_tags, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -2077,6 +2085,8 @@ def delete( # pylint: disable=inconsistent-return-statements delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, request_id_parameter: Optional[str] = None, blob_delete_type: Literal["Permanent"] = "Permanent", + access_tier_if_modified_since: Optional[datetime.datetime] = None, + access_tier_if_unmodified_since: Optional[datetime.datetime] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any @@ -2122,6 +2132,12 @@ def delete( # pylint: disable=inconsistent-return-statements permanently delete a blob if blob soft delete is enabled. Known values are "Permanent" and None. Default value is "Permanent". :type blob_delete_type: str + :param access_tier_if_modified_since: Specify this header value to operate only on a blob if + the access-tier has been modified since the specified date/time. Default value is None. + :type access_tier_if_modified_since: ~datetime.datetime + :param access_tier_if_unmodified_since: Specify this header value to operate only on a blob if + the access-tier has not been modified since the specified date/time. Default value is None. + :type access_tier_if_unmodified_since: ~datetime.datetime :param lease_access_conditions: Parameter group. Default value is None. :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions :param modified_access_conditions: Parameter group. Default value is None. 
@@ -2160,6 +2176,7 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -2172,7 +2189,8 @@ def delete( # pylint: disable=inconsistent-return-statements if_tags=_if_tags, request_id_parameter=request_id_parameter, blob_delete_type=blob_delete_type, - version=self._config.version, + access_tier_if_modified_since=access_tier_if_modified_since, + access_tier_if_unmodified_since=access_tier_if_unmodified_since, headers=_headers, params=_params, ) @@ -2239,10 +2257,10 @@ def undelete( # pylint: disable=inconsistent-return-statements _request = build_undelete_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2320,11 +2338,11 @@ def set_expiry( # pylint: disable=inconsistent-return-statements _request = build_set_expiry_request( url=self._config.url, expiry_options=expiry_options, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, expires_on=expires_on, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2433,6 +2451,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements _request = build_set_http_headers_request( url=self._config.url, + version=self._config.version, timeout=timeout, blob_cache_control=_blob_cache_control, blob_content_type=_blob_content_type, @@ -2448,7 +2467,6 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements blob_content_disposition=_blob_content_disposition, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2550,6 +2568,7 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements _request = build_set_immutability_policy_request( 
url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, if_unmodified_since=_if_unmodified_since, @@ -2558,7 +2577,6 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2646,12 +2664,12 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement _request = build_delete_immutability_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2737,12 +2755,12 @@ def set_legal_hold( # pylint: disable=inconsistent-return-statements _request = build_set_legal_hold_request( url=self._config.url, legal_hold=legal_hold, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, version_id=version_id, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2860,6 +2878,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, @@ -2874,7 +2893,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2983,6 +3001,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -2994,7 +3013,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements 
request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -3088,6 +3106,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -3097,7 +3116,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -3190,6 +3208,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -3199,7 +3218,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -3299,6 +3317,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements url=self._config.url, lease_id=lease_id, proposed_lease_id=proposed_lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -3308,7 +3327,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -3407,6 +3425,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, if_modified_since=_if_modified_since, @@ -3417,7 +3436,6 @@ def 
break_lease( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -3536,6 +3554,7 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements _request = build_create_snapshot_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, encryption_key=_encryption_key, @@ -3550,7 +3569,6 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements lease_id=_lease_id, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -3590,7 +3608,7 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def start_copy_from_url( # pylint: disable=inconsistent-return-statements + def start_copy_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, copy_source: str, timeout: Optional[int] = None, @@ -3706,6 +3724,7 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements _request = build_start_copy_from_url_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, tier=tier, @@ -3727,7 +3746,6 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements immutability_policy_expiry=immutability_policy_expiry, immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, - version=self._config.version, headers=_headers, params=_params, ) @@ -3765,7 +3783,7 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def copy_from_url( # pylint: disable=inconsistent-return-statements + def copy_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, 
copy_source: str, timeout: Optional[int] = None, @@ -3897,6 +3915,7 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements _request = build_copy_from_url_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, tier=tier, @@ -3921,7 +3940,6 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements copy_source_tags=copy_source_tags, file_request_intent=file_request_intent, x_ms_requires_sync=x_ms_requires_sync, - version=self._config.version, headers=_headers, params=_params, ) @@ -4019,12 +4037,12 @@ def abort_copy_from_url( # pylint: disable=inconsistent-return-statements _request = build_abort_copy_from_url_request( url=self._config.url, copy_id=copy_id, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, comp=comp, copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, headers=_headers, params=_params, ) @@ -4133,6 +4151,7 @@ def set_tier( # pylint: disable=inconsistent-return-statements _request = build_set_tier_request( url=self._config.url, tier=tier, + version=self._config.version, snapshot=snapshot, version_id=version_id, timeout=timeout, @@ -4141,7 +4160,6 @@ def set_tier( # pylint: disable=inconsistent-return-statements lease_id=_lease_id, if_tags=_if_tags, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -4208,11 +4226,11 @@ def get_account_info( # pylint: disable=inconsistent-return-statements _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -4333,6 +4351,7 @@ def query( _request = build_query_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, lease_id=_lease_id, @@ -4347,7 +4366,6 @@ 
def query( request_id_parameter=request_id_parameter, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -4576,6 +4594,7 @@ def get_tags( _request = build_get_tags_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, snapshot=snapshot, @@ -4587,7 +4606,6 @@ def get_tags( if_match=_if_match, if_none_match=_if_none_match, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -4708,6 +4726,7 @@ def set_tags( # pylint: disable=inconsistent-return-statements _request = build_set_tags_request( url=self._config.url, + version=self._config.version, timeout=timeout, version_id=version_id, transactional_content_md5=transactional_content_md5, @@ -4721,7 +4740,6 @@ def set_tags( # pylint: disable=inconsistent-return-statements if_none_match=_if_none_match, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py index 199e115c1cfc..f4a3ca253280 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py @@ -35,11 +35,12 @@ _SERIALIZER.client_side_validation = False -def build_upload_request( +def build_upload_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, content_length: int, content: IO[bytes], + version: str, timeout: Optional[int] = None, transactional_content_md5: Optional[bytes] = None, blob_content_type: Optional[str] = None, @@ -75,7 +76,6 @@ def build_upload_request( blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", 
_headers.pop("x-ms-blob-type", "BlockBlob")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -173,11 +173,12 @@ def build_upload_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) -def build_put_blob_from_url_request( +def build_put_blob_from_url_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, content_length: int, copy_source: str, + version: str, timeout: Optional[int] = None, transactional_content_md5: Optional[bytes] = None, blob_content_type: Optional[str] = None, @@ -210,13 +211,15 @@ def build_put_blob_from_url_request( copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, + source_encryption_key: Optional[str] = None, + source_encryption_key_sha256: Optional[str] = None, + source_encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -316,6 +319,18 @@ def build_put_blob_from_url_request( _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") if file_request_intent is not None: _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") + 
if source_encryption_key is not None: + _headers["x-ms-source-encryption-key"] = _SERIALIZER.header( + "source_encryption_key", source_encryption_key, "str" + ) + if source_encryption_key_sha256 is not None: + _headers["x-ms-source-encryption-key-sha256"] = _SERIALIZER.header( + "source_encryption_key_sha256", source_encryption_key_sha256, "str" + ) + if source_encryption_algorithm is not None: + _headers["x-ms-source-encryption-algorithm"] = _SERIALIZER.header( + "source_encryption_algorithm", source_encryption_algorithm, "str" + ) _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -327,6 +342,7 @@ def build_stage_block_request( block_id: str, content_length: int, content: IO[bytes], + version: str, transactional_content_md5: Optional[bytes] = None, transactional_content_crc64: Optional[bytes] = None, timeout: Optional[int] = None, @@ -345,7 +361,6 @@ def build_stage_block_request( comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -400,12 +415,13 @@ def build_stage_block_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) -def build_stage_block_from_url_request( +def build_stage_block_from_url_request( # pylint: disable=too-many-locals url: str, *, block_id: str, content_length: int, source_url: str, + version: str, source_range: Optional[str] = None, source_content_md5: Optional[bytes] = None, source_contentcrc64: Optional[bytes] = None, @@ -422,13 +438,15 @@ def build_stage_block_from_url_request( request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, file_request_intent: 
Optional[Union[str, _models.FileShareTokenIntent]] = None, + source_encryption_key: Optional[str] = None, + source_encryption_key_sha256: Optional[str] = None, + source_encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -489,15 +507,28 @@ def build_stage_block_from_url_request( ) if file_request_intent is not None: _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") + if source_encryption_key is not None: + _headers["x-ms-source-encryption-key"] = _SERIALIZER.header( + "source_encryption_key", source_encryption_key, "str" + ) + if source_encryption_key_sha256 is not None: + _headers["x-ms-source-encryption-key-sha256"] = _SERIALIZER.header( + "source_encryption_key_sha256", source_encryption_key_sha256, "str" + ) + if source_encryption_algorithm is not None: + _headers["x-ms-source-encryption-algorithm"] = _SERIALIZER.header( + "source_encryption_algorithm", source_encryption_algorithm, "str" + ) _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_commit_block_list_request( +def build_commit_block_list_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, content: Any, + version: str, timeout: Optional[int] = None, blob_cache_control: Optional[str] = None, blob_content_type: Optional[str] = None, @@ -531,7 +562,6 @@ def build_commit_block_list_request( comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", 
"blocklist")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -625,6 +655,7 @@ def build_commit_block_list_request( def build_get_block_list_request( url: str, *, + version: str, snapshot: Optional[str] = None, list_type: Union[str, _models.BlockListType] = "committed", timeout: Optional[int] = None, @@ -637,7 +668,6 @@ def build_get_block_list_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -689,7 +719,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def upload( # pylint: disable=inconsistent-return-statements + def upload( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, body: IO[bytes], @@ -838,6 +868,7 @@ def upload( # pylint: disable=inconsistent-return-statements _request = build_upload_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, blob_content_type=_blob_content_type, @@ -868,7 +899,6 @@ def upload( # pylint: disable=inconsistent-return-statements structured_content_length=structured_content_length, blob_type=blob_type, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -918,7 +948,7 @@ def upload( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - 
def put_blob_from_url( # pylint: disable=inconsistent-return-statements + def put_blob_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, copy_source: str, @@ -939,6 +969,7 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements cpk_scope_info: Optional[_models.CpkScopeInfo] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are @@ -1009,6 +1040,8 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. 
+ :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -1048,6 +1081,9 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements _source_if_match = None _source_if_none_match = None _source_if_tags = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if blob_http_headers is not None: _blob_cache_control = blob_http_headers.blob_cache_control _blob_content_disposition = blob_http_headers.blob_content_disposition @@ -1075,11 +1111,16 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_tags = source_modified_access_conditions.source_if_tags _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_put_blob_from_url_request( url=self._config.url, content_length=content_length, copy_source=copy_source, + version=self._config.version, timeout=timeout, transactional_content_md5=transactional_content_md5, blob_content_type=_blob_content_type, @@ -1112,8 +1153,10 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements copy_source_authorization=copy_source_authorization, copy_source_tags=copy_source_tags, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, blob_type=blob_type, - version=self._config.version, headers=_headers, params=_params, ) @@ -1159,7 +1202,7 @@ def put_blob_from_url( # 
pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def stage_block( # pylint: disable=inconsistent-return-statements + def stage_block( # pylint: disable=inconsistent-return-statements,too-many-locals self, block_id: str, content_length: int, @@ -1251,6 +1294,7 @@ def stage_block( # pylint: disable=inconsistent-return-statements url=self._config.url, block_id=block_id, content_length=content_length, + version=self._config.version, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, timeout=timeout, @@ -1264,7 +1308,6 @@ def stage_block( # pylint: disable=inconsistent-return-statements structured_content_length=structured_content_length, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1314,7 +1357,7 @@ def stage_block( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def stage_block_from_url( # pylint: disable=inconsistent-return-statements + def stage_block_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, block_id: str, content_length: int, @@ -1330,6 +1373,7 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Stage Block operation creates a new block to be committed as part of a blob where the @@ -1374,6 +1418,8 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements :param source_modified_access_conditions: Parameter group. Default value is None. 
:type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. + :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -1401,6 +1447,9 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -1414,12 +1463,17 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_stage_block_from_url_request( url=self._config.url, block_id=block_id, content_length=content_length, source_url=source_url, + version=self._config.version, source_range=source_range, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, @@ -1436,8 +1490,10 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + 
source_encryption_algorithm=_source_encryption_algorithm, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1483,7 +1539,7 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def commit_block_list( # pylint: disable=inconsistent-return-statements + def commit_block_list( # pylint: disable=inconsistent-return-statements,too-many-locals self, blocks: _models.BlockLookupList, timeout: Optional[int] = None, @@ -1621,6 +1677,7 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements _request = build_commit_block_list_request( url=self._config.url, + version=self._config.version, timeout=timeout, blob_cache_control=_blob_cache_control, blob_content_type=_blob_content_type, @@ -1649,7 +1706,6 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements legal_hold=legal_hold, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1762,6 +1818,7 @@ def get_block_list( _request = build_get_block_list_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, list_type=list_type, timeout=timeout, @@ -1769,7 +1826,6 @@ def get_block_list( if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py index e1cf86250fc2..ec2deb0de1c0 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py @@ -40,6 +40,7 @@ def build_create_request( url: str, *, + version: str, timeout: 
Optional[int] = None, metadata: Optional[dict[str, str]] = None, access: Optional[Union[str, _models.PublicAccessType]] = None, @@ -52,7 +53,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -92,6 +92,7 @@ def build_create_request( def build_get_properties_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -101,7 +102,6 @@ def build_get_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -131,6 +131,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, if_modified_since: Optional[datetime.datetime] = None, @@ -142,7 +143,6 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -176,6 +176,7 @@ def build_delete_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, metadata: Optional[dict[str, str]] = None, @@ -188,7 +189,6 @@ def build_set_metadata_request( restype: Literal["container"] = 
kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -223,6 +223,7 @@ def build_set_metadata_request( def build_get_access_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -233,7 +234,6 @@ def build_get_access_policy_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -264,6 +264,7 @@ def build_get_access_policy_request( def build_set_access_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, access: Optional[Union[str, _models.PublicAccessType]] = None, @@ -279,7 +280,6 @@ def build_set_access_policy_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -318,6 +318,7 @@ def build_set_access_policy_request( def build_restore_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, deleted_container_name: Optional[str] = None, @@ -329,7 +330,6 @@ def build_restore_request( restype: Literal["container"] = kwargs.pop("restype", 
_params.pop("restype", "container")) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -367,6 +367,7 @@ def build_rename_request( url: str, *, source_container_name: str, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, source_lease_id: Optional[str] = None, @@ -377,7 +378,6 @@ def build_rename_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -411,6 +411,7 @@ def build_submit_batch_request( *, content_length: int, content: IO[bytes], + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any @@ -421,7 +422,6 @@ def build_submit_batch_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -453,6 +453,7 @@ def build_submit_batch_request( def build_filter_blobs_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, where: Optional[str] = None, @@ -466,7 +467,6 @@ def build_filter_blobs_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["blobs"] = kwargs.pop("comp", 
_params.pop("comp", "blobs")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -503,6 +503,7 @@ def build_filter_blobs_request( def build_acquire_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, @@ -517,7 +518,6 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -556,6 +556,7 @@ def build_release_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -568,7 +569,6 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -604,6 +604,7 @@ def build_renew_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -616,7 +617,6 @@ def build_renew_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: 
Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -651,6 +651,7 @@ def build_renew_lease_request( def build_break_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, break_period: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, @@ -664,7 +665,6 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -702,6 +702,7 @@ def build_change_lease_request( *, lease_id: str, proposed_lease_id: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -714,7 +715,6 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -750,6 +750,7 @@ def build_change_lease_request( def build_list_blob_flat_segment_request( url: str, *, + version: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, 
@@ -764,7 +765,6 @@ def build_list_blob_flat_segment_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -804,6 +804,7 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long url: str, *, delimiter: str, + version: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, @@ -818,7 +819,6 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -856,14 +856,13 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -969,6 +968,7 @@ def create( # pylint: disable=inconsistent-return-statements _request = 
build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, access=access, @@ -976,7 +976,6 @@ def create( # pylint: disable=inconsistent-return-statements default_encryption_scope=_default_encryption_scope, prevent_encryption_scope_override=_prevent_encryption_scope_override, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1056,11 +1055,11 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1168,13 +1167,13 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, request_id_parameter=request_id_parameter, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1267,6 +1266,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, metadata=metadata, @@ -1274,7 +1274,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1355,12 +1354,12 @@ def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1472,6 +1471,7 @@ def 
set_access_policy( # pylint: disable=inconsistent-return-statements _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, access=access, @@ -1481,7 +1481,6 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1563,13 +1562,13 @@ def restore( # pylint: disable=inconsistent-return-statements _request = build_restore_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, deleted_container_name=deleted_container_name, deleted_container_version=deleted_container_version, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1649,12 +1648,12 @@ def rename( # pylint: disable=inconsistent-return-statements _request = build_rename_request( url=self._config.url, source_container_name=source_container_name, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, source_lease_id=source_lease_id, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1737,12 +1736,12 @@ def submit_batch( _request = build_submit_batch_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, multipart_content_type=multipart_content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1845,6 +1844,7 @@ def filter_blobs( _request = build_filter_blobs_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, where=where, @@ -1853,7 +1853,6 @@ def filter_blobs( include=include, restype=restype, comp=comp, - version=self._config.version, 
headers=_headers, params=_params, ) @@ -1949,6 +1948,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -1958,7 +1958,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2047,6 +2046,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -2054,7 +2054,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2142,6 +2141,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -2149,7 +2149,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2243,6 +2242,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, if_modified_since=_if_modified_since, @@ -2251,7 +2251,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2346,6 +2345,7 @@ def change_lease( # 
pylint: disable=inconsistent-return-statements url=self._config.url, lease_id=lease_id, proposed_lease_id=proposed_lease_id, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -2353,7 +2353,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements comp=comp, restype=restype, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2456,6 +2455,7 @@ def list_blob_flat_segment( _request = build_list_blob_flat_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -2465,7 +2465,6 @@ def list_blob_flat_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2577,6 +2576,7 @@ def list_blob_hierarchy_segment( _request = build_list_blob_hierarchy_segment_request( url=self._config.url, delimiter=delimiter, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -2586,7 +2586,6 @@ def list_blob_hierarchy_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2659,11 +2658,11 @@ def get_account_info( # pylint: disable=inconsistent-return-statements _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py index d37595b14026..5b1d83541194 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py @@ -35,11 +35,12 @@ _SERIALIZER.client_side_validation = False -def build_create_request( +def build_create_request( # pylint: disable=too-many-locals url: str, *, content_length: int, blob_content_length: int, + version: str, timeout: Optional[int] = None, tier: Optional[Union[str, _models.PremiumPageBlobAccessTier]] = None, blob_content_type: Optional[str] = None, @@ -71,7 +72,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -156,11 +156,12 @@ def build_create_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_upload_pages_request( +def build_upload_pages_request( # pylint: disable=too-many-locals url: str, *, content_length: int, content: IO[bytes], + version: str, transactional_content_md5: Optional[bytes] = None, transactional_content_crc64: Optional[bytes] = None, timeout: Optional[int] = None, @@ -189,7 +190,6 @@ def build_upload_pages_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -268,10 +268,11 @@ def build_upload_pages_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) -def build_clear_pages_request( +def build_clear_pages_request( # 
pylint: disable=too-many-locals url: str, *, content_length: int, + version: str, timeout: Optional[int] = None, range: Optional[str] = None, lease_id: Optional[str] = None, @@ -295,7 +296,6 @@ def build_clear_pages_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -358,13 +358,14 @@ def build_clear_pages_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_upload_pages_from_url_request( +def build_upload_pages_from_url_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, source_url: str, source_range: str, content_length: int, range: str, + version: str, source_content_md5: Optional[bytes] = None, source_contentcrc64: Optional[bytes] = None, timeout: Optional[int] = None, @@ -388,6 +389,9 @@ def build_upload_pages_from_url_request( request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, + source_encryption_key: Optional[str] = None, + source_encryption_key_sha256: Optional[str] = None, + source_encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -395,7 +399,6 @@ def build_upload_pages_from_url_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct 
URL @@ -478,6 +481,18 @@ def build_upload_pages_from_url_request( ) if file_request_intent is not None: _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") + if source_encryption_key is not None: + _headers["x-ms-source-encryption-key"] = _SERIALIZER.header( + "source_encryption_key", source_encryption_key, "str" + ) + if source_encryption_key_sha256 is not None: + _headers["x-ms-source-encryption-key-sha256"] = _SERIALIZER.header( + "source_encryption_key_sha256", source_encryption_key_sha256, "str" + ) + if source_encryption_algorithm is not None: + _headers["x-ms-source-encryption-algorithm"] = _SERIALIZER.header( + "source_encryption_algorithm", source_encryption_algorithm, "str" + ) _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -486,6 +501,7 @@ def build_upload_pages_from_url_request( def build_get_page_ranges_request( url: str, *, + version: str, snapshot: Optional[str] = None, timeout: Optional[int] = None, range: Optional[str] = None, @@ -504,7 +520,6 @@ def build_get_page_ranges_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -552,6 +567,7 @@ def build_get_page_ranges_request( def build_get_page_ranges_diff_request( url: str, *, + version: str, snapshot: Optional[str] = None, timeout: Optional[int] = None, prevsnapshot: Optional[str] = None, @@ -572,7 +588,6 @@ def build_get_page_ranges_diff_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2026-02-06"] = kwargs.pop("version", 
_headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -625,6 +640,7 @@ def build_resize_request( url: str, *, blob_content_length: int, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, encryption_key: Optional[str] = None, @@ -643,7 +659,6 @@ def build_resize_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -695,6 +710,7 @@ def build_update_sequence_number_request( url: str, *, sequence_number_action: Union[str, _models.SequenceNumberActionType], + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, if_modified_since: Optional[datetime.datetime] = None, @@ -710,7 +726,6 @@ def build_update_sequence_number_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -756,6 +771,7 @@ def build_copy_incremental_request( url: str, *, copy_source: str, + version: str, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, if_unmodified_since: Optional[datetime.datetime] = None, @@ -769,7 +785,6 @@ def build_copy_incremental_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -825,7 +840,7 @@ def __init__(self, 
*args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, blob_content_length: int, @@ -957,6 +972,7 @@ def create( # pylint: disable=inconsistent-return-statements url=self._config.url, content_length=content_length, blob_content_length=blob_content_length, + version=self._config.version, timeout=timeout, tier=tier, blob_content_type=_blob_content_type, @@ -983,7 +999,6 @@ def create( # pylint: disable=inconsistent-return-statements immutability_policy_mode=immutability_policy_mode, legal_hold=legal_hold, blob_type=blob_type, - version=self._config.version, headers=_headers, params=_params, ) @@ -1029,7 +1044,7 @@ def create( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def upload_pages( # pylint: disable=inconsistent-return-statements + def upload_pages( # pylint: disable=inconsistent-return-statements,too-many-locals self, content_length: int, body: IO[bytes], @@ -1146,6 +1161,7 @@ def upload_pages( # pylint: disable=inconsistent-return-statements _request = build_upload_pages_request( url=self._config.url, content_length=content_length, + version=self._config.version, transactional_content_md5=transactional_content_md5, transactional_content_crc64=transactional_content_crc64, timeout=timeout, @@ -1169,7 +1185,6 @@ def upload_pages( # pylint: disable=inconsistent-return-statements comp=comp, page_write=page_write, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1319,6 +1334,7 @@ def clear_pages( # pylint: disable=inconsistent-return-statements _request = build_clear_pages_request( url=self._config.url, content_length=content_length, + 
version=self._config.version, timeout=timeout, range=range, lease_id=_lease_id, @@ -1337,7 +1353,6 @@ def clear_pages( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, comp=comp, page_write=page_write, - version=self._config.version, headers=_headers, params=_params, ) @@ -1379,7 +1394,7 @@ def clear_pages( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def upload_pages_from_url( # pylint: disable=inconsistent-return-statements + def upload_pages_from_url( # pylint: disable=inconsistent-return-statements,too-many-locals self, source_url: str, source_range: str, @@ -1397,6 +1412,7 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements sequence_number_access_conditions: Optional[_models.SequenceNumberAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, + source_cpk_info: Optional[_models.SourceCpkInfo] = None, **kwargs: Any ) -> None: """The Upload Pages operation writes a range of pages to a page blob where the contents are read @@ -1446,6 +1462,8 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :param source_cpk_info: Parameter group. Default value is None. 
+ :type source_cpk_info: ~azure.storage.blob.models.SourceCpkInfo :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -1482,6 +1500,9 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements _source_if_unmodified_since = None _source_if_match = None _source_if_none_match = None + _source_encryption_key = None + _source_encryption_key_sha256 = None + _source_encryption_algorithm = None if cpk_info is not None: _encryption_algorithm = cpk_info.encryption_algorithm _encryption_key = cpk_info.encryption_key @@ -1507,6 +1528,10 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements _source_if_modified_since = source_modified_access_conditions.source_if_modified_since _source_if_none_match = source_modified_access_conditions.source_if_none_match _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + if source_cpk_info is not None: + _source_encryption_algorithm = source_cpk_info.source_encryption_algorithm + _source_encryption_key = source_cpk_info.source_encryption_key + _source_encryption_key_sha256 = source_cpk_info.source_encryption_key_sha256 _request = build_upload_pages_from_url_request( url=self._config.url, @@ -1514,6 +1539,7 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements source_range=source_range, content_length=content_length, range=range, + version=self._config.version, source_content_md5=source_content_md5, source_contentcrc64=source_contentcrc64, timeout=timeout, @@ -1537,9 +1563,11 @@ def upload_pages_from_url( # pylint: disable=inconsistent-return-statements request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, file_request_intent=file_request_intent, + source_encryption_key=_source_encryption_key, + source_encryption_key_sha256=_source_encryption_key_sha256, + source_encryption_algorithm=_source_encryption_algorithm, comp=comp, 
page_write=page_write, - version=self._config.version, headers=_headers, params=_params, ) @@ -1672,6 +1700,7 @@ def get_page_ranges( _request = build_get_page_ranges_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, range=range, @@ -1685,7 +1714,6 @@ def get_page_ranges( marker=marker, maxresults=maxresults, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1826,6 +1854,7 @@ def get_page_ranges_diff( _request = build_get_page_ranges_diff_request( url=self._config.url, + version=self._config.version, snapshot=snapshot, timeout=timeout, prevsnapshot=prevsnapshot, @@ -1841,7 +1870,6 @@ def get_page_ranges_diff( marker=marker, maxresults=maxresults, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1962,6 +1990,7 @@ def resize( # pylint: disable=inconsistent-return-statements _request = build_resize_request( url=self._config.url, blob_content_length=blob_content_length, + version=self._config.version, timeout=timeout, lease_id=_lease_id, encryption_key=_encryption_key, @@ -1975,7 +2004,6 @@ def resize( # pylint: disable=inconsistent-return-statements if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2083,6 +2111,7 @@ def update_sequence_number( # pylint: disable=inconsistent-return-statements _request = build_update_sequence_number_request( url=self._config.url, sequence_number_action=sequence_number_action, + version=self._config.version, timeout=timeout, lease_id=_lease_id, if_modified_since=_if_modified_since, @@ -2093,7 +2122,6 @@ def update_sequence_number( # pylint: disable=inconsistent-return-statements blob_sequence_number=blob_sequence_number, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2194,6 +2222,7 @@ def copy_incremental( # pylint: disable=inconsistent-return-statements 
_request = build_copy_incremental_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, @@ -2202,7 +2231,6 @@ def copy_incremental( # pylint: disable=inconsistent-return-statements if_tags=_if_tags, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py index e21be97dc66d..5edf48262218 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py @@ -37,7 +37,13 @@ def build_set_properties_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, + *, + content: Any, + version: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -45,7 +51,6 @@ def build_set_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -74,14 +79,13 @@ def build_set_properties_request( def build_get_properties_request( - url: str, *, timeout: Optional[int] = None, 
request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -108,14 +112,13 @@ def build_get_properties_request( def build_get_statistics_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -144,6 +147,7 @@ def build_get_statistics_request( def build_list_containers_segment_request( url: str, *, + version: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, @@ -156,7 +160,6 @@ def build_list_containers_segment_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = 
_headers.pop("Accept", "application/xml") # Construct URL @@ -190,7 +193,13 @@ def build_list_containers_segment_request( def build_get_user_delegation_key_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, + *, + content: Any, + version: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -198,7 +207,6 @@ def build_get_user_delegation_key_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -227,14 +235,13 @@ def build_get_user_delegation_key_request( def build_get_account_info_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -265,6 +272,7 @@ def build_submit_batch_request( *, content_length: int, content: IO[bytes], + 
version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any @@ -274,7 +282,6 @@ def build_submit_batch_request( comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -305,6 +312,7 @@ def build_submit_batch_request( def build_filter_blobs_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, where: Optional[str] = None, @@ -317,7 +325,6 @@ def build_filter_blobs_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -415,12 +422,12 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -489,11 +496,11 @@ def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -566,11 +573,11 @@ def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - 
version=self._config.version, headers=_headers, params=_params, ) @@ -669,6 +676,7 @@ def list_containers_segment( _request = build_list_containers_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -676,7 +684,6 @@ def list_containers_segment( timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -757,12 +764,12 @@ def get_user_delegation_key( _request = build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -835,11 +842,11 @@ def get_account_info( # pylint: disable=inconsistent-return-statements _request = build_get_account_info_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -924,11 +931,11 @@ def submit_batch( _request = build_submit_batch_request( url=self._config.url, content_length=content_length, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, multipart_content_type=multipart_content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1031,6 +1038,7 @@ def filter_blobs( _request = build_filter_blobs_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, where=where, @@ -1038,7 +1046,6 @@ def filter_blobs( maxresults=maxresults, include=include, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index bc290bfe2d08..c0512fb18748 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -63,6 +63,7 @@ '2025-07-05', '2025-11-05', '2026-02-06', + '2026-04-06', ] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py index 55284fda5f78..23786baef24b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py @@ -89,6 +89,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED instead. INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. 
INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" @@ -153,11 +155,15 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # File values CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" DELETE_PENDING = "DeletePending" DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" FILE_LOCK_CONFLICT = "FileLockConflict" FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_BANDWIDTH_INVALID = "FileShareProvisionedBandwidthInvalid" FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_INVALID = "FileShareProvisionedIopsInvalid" + FILE_SHARE_PROVISIONED_STORAGE_INVALID = "FileShareProvisionedStorageInvalid" INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" PARENT_NOT_FOUND = "ParentNotFound" READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" @@ -171,7 +177,10 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): SHARE_SNAPSHOT_NOT_FOUND = "ShareSnapshotNotFound" SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" - CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" + TOTAL_SHARES_PROVISIONED_CAPACITY_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedCapacityExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_IOPS_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedIopsExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_BANDWIDTH_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedBandwidthExceedsAccountLimit" + TOTAL_SHARES_COUNT_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesCountExceedsAccountLimit" # DataLake values CONTENT_LENGTH_MUST_BE_ZERO = "ContentLengthMustBeZero" @@ -517,6 +526,8 @@ class UserDelegationKey(object): 
"""Object ID of this token.""" signed_tid: Optional[str] = None """Tenant ID of the tenant that issued this token.""" + signed_delegated_user_tid: Optional[str] = None + """User Tenant ID of this token.""" signed_start: Optional[str] = None """The datetime this token becomes valid.""" signed_expiry: Optional[str] = None @@ -531,6 +542,7 @@ class UserDelegationKey(object): def __init__(self): self.signed_oid = None self.signed_tid = None + self.signed_delegated_user_tid = None self.signed_start = None self.signed_expiry = None self.signed_service = None diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py index 750838e3129a..9a079c56404f 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/response_handlers.py @@ -201,6 +201,7 @@ def parse_to_internal_user_delegation_key(service_user_delegation_key): internal_user_delegation_key = UserDelegationKey() internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_delegated_user_tid = service_user_delegation_key.signed_delegated_user_tid internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py index 0dae7284d490..0f7016f11d96 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py @@ -42,6 +42,8 @@ class QueryStringConstants(object): SIGNED_KEY_SERVICE = "sks" SIGNED_KEY_VERSION = "skv" SIGNED_ENCRYPTION_SCOPE = "ses" + SIGNED_REQUEST_HEADERS = "srh" + SIGNED_REQUEST_QUERY_PARAMS = "srq" SIGNED_KEY_DELEGATED_USER_TID = "skdutid" SIGNED_DELEGATED_USER_OID = "sduoid" @@ -81,6 +83,8 @@ def to_list(): QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, + QueryStringConstants.SIGNED_REQUEST_HEADERS, + QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, QueryStringConstants.SIGNED_DELEGATED_USER_OID, # for ADLS @@ -182,6 +186,10 @@ def __init__(self): self.query_dict = {} self.string_to_sign = "" + # STS-only values for dynamic user delegation SAS + self._sts_srh = "" # newline-delimited "k:v" + trailing newline (or empty) + self._sts_srq = "" # newline-delimited "k:v" + leading newline (or empty) + def _add_query(self, name, val): if val: self.query_dict[name] = str(val) if val is not None else None @@ -225,6 +233,28 @@ def add_override_response_headers( self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + def add_request_headers(self, request_headers): + if not request_headers: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srh = "\n".join([f"{k}:{v}" for k, v in request_headers.items()]) + "\n" + + # SAS query param: comma-separated list of encoded header keys only + srh_keys = ",".join([url_quote(k) for k in request_headers.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_HEADERS, srh_keys) + + def add_request_query_params(self, request_query_params): + if not request_query_params: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srq = "\n" + 
"\n".join([f"{k}:{v}" for k, v in request_query_params.items()]) + + # SAS query param: comma-separated list of encoded query-param keys only + srq_keys = ",".join([url_quote(k) for k in request_query_params.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, srq_keys) + def add_account_signature(self, account_name, account_key): def get_value_to_append(query): return_value = self.query_dict.get(query) or "" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py index a3003754ec95..5298d40eaf34 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py @@ -6,7 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from typing import ( - Any, Callable, Optional, Union, + Any, Callable, Dict, Optional, Union, TYPE_CHECKING ) from urllib.parse import parse_qs @@ -68,6 +68,8 @@ def generate_blob( content_language: Optional[str] = None, content_type: Optional[str] = None, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -141,6 +143,12 @@ def generate_blob( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :param Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. + :param Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. 
:param sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. @@ -166,8 +174,14 @@ def generate_blob( content_type) sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) - sas.add_resource_signature(self.account_name, self.account_key, resource_path, - user_delegation_key=self.user_delegation_key) + sas.add_resource_signature( + self.account_name, + self.account_key, + resource_path, + user_delegation_key=self.user_delegation_key, + request_headers=request_headers, + request_query_params=request_query_params + ) if sts_hook is not None: sts_hook(sas.string_to_sign) @@ -188,6 +202,8 @@ def generate_container( content_language: Optional[str] = None, content_type: Optional[str] = None, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -251,6 +267,12 @@ def generate_container( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :param Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. + :param Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. :param sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. 
@@ -268,8 +290,14 @@ def generate_container( content_type) sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) - sas.add_resource_signature(self.account_name, self.account_key, container_name, - user_delegation_key=self.user_delegation_key) + sas.add_resource_signature( + self.account_name, + self.account_key, + container_name, + user_delegation_key=self.user_delegation_key, + request_headers=request_headers, + request_query_params=request_query_params + ) if sts_hook is not None: sts_hook(sas.string_to_sign) @@ -292,7 +320,16 @@ def get_value_to_append(self, query): return_value = self.query_dict.get(query) or '' return return_value + '\n' - def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): + def add_resource_signature( + self, + account_name, + account_key, + path, + user_delegation_key=None, + *, + request_headers=None, + request_query_params=None + ): if path[0] != '/': path = '/' + path @@ -313,6 +350,12 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) + self._add_query( + QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, + user_delegation_key.signed_delegated_user_tid + ) + self.add_request_headers(request_headers) + self.add_request_query_params(request_query_params) string_to_sign += \ (self.get_value_to_append(QueryStringConstants.SIGNED_OID) + @@ -329,18 +372,26 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio else: string_to_sign += self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) - string_to_sign += \ - (self.get_value_to_append(QueryStringConstants.SIGNED_IP) + + string_to_sign += ( + 
self.get_value_to_append(QueryStringConstants.SIGNED_IP) + self.get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + - self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + + self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + ) + + if user_delegation_key is not None: + string_to_sign += (self._sts_srh + "\n") if self._sts_srh else "\n" + string_to_sign += (self._sts_srq + "\n") if self._sts_srq else "\n" + + string_to_sign += ( self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) + - self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE)) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE) + ) # remove the trailing newline if string_to_sign[-1] == '\n': @@ -355,7 +406,8 @@ def get_token(self) -> str: # a conscious decision was made to exclude the timestamp in the generated token # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] - return '&'.join([f'{n}={url_quote(v)}' + no_quote = [QueryStringConstants.SIGNED_REQUEST_HEADERS, QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS] + return '&'.join([f'{n}={url_quote(v)}' if n not in no_quote else f"{n}={v}" for n, v in self.query_dict.items() if v is not None and n not in exclude]) @@ -452,6 +504,8 @@ def generate_container_sas( ip: Optional[str] = None, *, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = 
None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -530,6 +584,12 @@ def generate_container_sas( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :keyword Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. + :keyword Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. :keyword sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. @@ -567,6 +627,8 @@ def generate_container_sas( policy_id=policy_id, ip=ip, user_delegation_oid=user_delegation_oid, + request_headers=request_headers, + request_query_params=request_query_params, sts_hook=sts_hook, **kwargs ) @@ -586,6 +648,8 @@ def generate_blob_sas( ip: Optional[str] = None, *, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -676,6 +740,12 @@ def generate_blob_sas( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :keyword Dict[str, str] request_headers: + If specified, both the correct request header(s) and corresponding values must be present, + or the request will fail. + :keyword Dict[str, str] request_query_params: + If specified, both the correct query parameter(s) and corresponding values must be present, + or the request will fail. :keyword sts_hook: For debugging purposes only. 
If provided, the hook is called with the string to sign that was used to generate the SAS. @@ -709,8 +779,10 @@ def generate_blob_sas( start=start, policy_id=policy_id, ip=ip, - sts_hook=sts_hook, user_delegation_oid=user_delegation_oid, + request_headers=request_headers, + request_query_params=request_query_params, + sts_hook=sts_hook, **kwargs ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 3166093a736d..f1143006ec69 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -196,8 +196,7 @@ def __init__( self._raw_credential = credential if credential else sas_token self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot) super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureBlobStorage(self.url, get_api_version(kwargs), base_url=self.url, pipeline=self._pipeline) self._configure_encryption(kwargs) async def __aenter__(self) -> Self: @@ -438,6 +437,10 @@ async def upload_blob_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. 
An encryption scope can be created using the Management API and referenced here by name. If a default @@ -461,8 +464,9 @@ async def upload_blob_from_url( :return: Response from creating a new block blob for a given URL. :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_blob_from_url_options( source_url=source_url, metadata=metadata, @@ -484,7 +488,7 @@ async def upload_blob( """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. - :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -977,6 +981,18 @@ async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: An .. versionadded:: 12.4.0 + :keyword ~datetime.datetime access_tier_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the access-tier has been modified since the specified date/time. + :keyword ~datetime.datetime access_tier_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only if + the access-tier has not been modified since the specified date/time. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -999,7 +1015,8 @@ async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: An snapshot=self.snapshot, version_id=get_version_id(self.version_id, kwargs), delete_snapshots=delete_snapshots, - **kwargs) + **kwargs + ) try: await self._client.blob.delete(**options) except HttpResponseError as error: @@ -2022,7 +2039,7 @@ async def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardB @distributed_trace_async async def stage_block( self, block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: @@ -2032,8 +2049,10 @@ async def stage_block( The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] - :param int length: Size of the block. + :type data: Union[bytes, Iterable[bytes], IO[bytes]] + :param int length: + Size of the block. Optional if the length of data can be determined. For Iterable and IO, if the + length is not provided and cannot be determined, all data will be read into memory. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage service checks the hash of the content that has arrived with the hash @@ -2117,6 +2136,10 @@ async def stage_block_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key.
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. If a default @@ -2146,8 +2169,9 @@ async def stage_block_from_url( :return: Blob property dict. :rtype: Dict[str, Any] """ - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _stage_block_from_url_options( block_id=block_id, source_url=source_url, @@ -3037,6 +3061,10 @@ async def upload_pages_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. 
If a default @@ -3069,8 +3097,9 @@ async def upload_pages_from_url( if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _upload_pages_from_url_options( source_url=source_url, offset=offset, @@ -3163,7 +3192,7 @@ async def clear_page(self, offset: int, length: int, **kwargs: Any) -> Dict[str, @distributed_trace_async async def append_block( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Union[str, datetime, int]]: @@ -3171,9 +3200,10 @@ async def append_block( :param data: Content of the block. - :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] + :type data: Union[bytes, Iterable[bytes], IO[bytes]] :param int length: - Size of the block in bytes. + Size of the block. Optional if the length of data can be determined. For Iterable and IO, if the + length is not provided and cannot be determined, all data will be read into memory. :keyword bool validate_content: If true, calculates an MD5 hash of the block content. The storage service checks the hash of the content that has arrived @@ -3339,6 +3369,10 @@ async def append_block_from_url( Use of customer-provided keys must be done over HTTPS. As the encryption key itself is provided in the request, a secure connection must be established to transfer the key. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey source_cpk: + Specifies the source encryption key to use to decrypt + the source data provided in the request. + Use of customer-provided keys must be done over HTTPS. 
:keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the service. An encryption scope can be created using the Management API and referenced here by name. If a default @@ -3370,8 +3404,9 @@ async def append_block_from_url( """ if self.require_encryption or (self.key_encryption_key is not None): raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) - if kwargs.get('cpk') and self.scheme.lower() != 'https': - raise ValueError("Customer provided encryption key must be used over HTTPS.") + if self.scheme.lower() != 'https': + if kwargs.get('cpk') or kwargs.get('source_cpk'): + raise ValueError("Customer provided encryption key must be used over HTTPS.") options = _append_block_from_url_options( copy_source_url=copy_source_url, source_offset=source_offset, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.pyi b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.pyi index 94dd817efdf0..dd7dc9ede496 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.pyi +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.pyi @@ -158,6 +158,7 @@ class BlobClient( # type: ignore[misc] timeout: Optional[int] = None, content_settings: Optional[ContentSettings] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, standard_blob_tier: Optional[StandardBlobTier] = None, source_authorization: Optional[str] = None, @@ -272,6 +273,8 @@ class BlobClient( # type: ignore[misc] etag: Optional[str] = None, match_condition: Optional[MatchConditions] = None, if_tags_match_condition: Optional[str] = None, + access_tier_if_modified_since: Optional[datetime] = None, + access_tier_if_unmodified_since: Optional[datetime] = None, timeout: Optional[int] = None, **kwargs: Any ) -> None: ... 
@@ -464,7 +467,7 @@ class BlobClient( # type: ignore[misc] async def stage_block( self, block_id: str, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, *, validate_content: Optional[bool] = None, @@ -486,6 +489,7 @@ class BlobClient( # type: ignore[misc] *, lease: Optional[Union[BlobLeaseClient, str]] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, @@ -689,6 +693,7 @@ class BlobClient( # type: ignore[misc] match_condition: Optional[MatchConditions] = None, if_tags_match_condition: Optional[str] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, @@ -717,7 +722,7 @@ class BlobClient( # type: ignore[misc] @distributed_trace_async async def append_block( self, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], length: Optional[int] = None, *, validate_content: Optional[bool] = None, @@ -756,6 +761,7 @@ class BlobClient( # type: ignore[misc] source_etag: Optional[str] = None, source_match_condition: Optional[MatchConditions] = None, cpk: Optional[CustomerProvidedEncryptionKey] = None, + source_cpk: Optional[CustomerProvidedEncryptionKey] = None, encryption_scope: Optional[str] = None, source_authorization: Optional[str] = None, source_token_intent: Optional[Literal["backup"]] = None, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py index 6d28b4c6d732..8a4eb6bb03dd 100644 --- 
a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py @@ -135,8 +135,7 @@ def __init__( _, sas_token = parse_query(parsed_url.query) self._query_str, credential = self._format_query_string(sas_token, credential) super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) - self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureBlobStorage(self.url, get_api_version(kwargs), base_url=self.url, pipeline=self._pipeline) self._configure_encryption(kwargs) async def __aenter__(self) -> Self: @@ -235,6 +234,8 @@ def from_connection_string( async def get_user_delegation_key( self, key_start_time: "datetime", key_expiry_time: "datetime", + *, + delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> "UserDelegationKey": """ @@ -245,6 +246,7 @@ async def get_user_delegation_key( A DateTime value. Indicates when the key becomes valid. :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -254,7 +256,11 @@ async def get_user_delegation_key( :return: The user delegation key. 
:rtype: ~azure.storage.blob.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) + key_info = KeyInfo( + start=_to_utc_datetime(key_start_time), + expiry=_to_utc_datetime(key_expiry_time), + delegated_user_tid=delegated_user_tid + ) timeout = kwargs.pop('timeout', None) try: user_delegation_key = await self._client.service.get_user_delegation_key(key_info=key_info, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.pyi b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.pyi index 6d12758291e9..cf342630fa4a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.pyi +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.pyi @@ -91,7 +91,13 @@ class BlobServiceClient( # type: ignore [misc] ) -> Self: ... @distributed_trace_async async def get_user_delegation_key( - self, key_start_time: datetime, key_expiry_time: datetime, *, timeout: Optional[int] = None, **kwargs: Any + self, + key_start_time: datetime, + key_expiry_time: datetime, + *, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any ) -> UserDelegationKey: ... @distributed_trace_async async def get_account_information(self, **kwargs: Any) -> Dict[str, str]: ... 
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 72376a2dfcfe..e08abc8d3ca6 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -164,9 +164,7 @@ async def close(self) -> None: await self._client.close() def _build_generated_client(self) -> AzureBlobStorage: - client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access - return client + return AzureBlobStorage(self.url, self._api_version, base_url=self.url, pipeline=self._pipeline) def _format_url(self, hostname): return _format_url( diff --git a/sdk/storage/azure-storage-blob/setup.py b/sdk/storage/azure-storage-blob/setup.py index c25bb083178e..0abb2504e0fb 100644 --- a/sdk/storage/azure-storage-blob/setup.py +++ b/sdk/storage/azure-storage-blob/setup.py @@ -79,14 +79,14 @@ ]), python_requires=">=3.9", install_requires=[ - "azure-core>=1.30.0", + "azure-core>=1.37.0", "cryptography>=2.1.4", "typing-extensions>=4.6.0", "isodate>=0.6.1" ], extras_require={ "aio": [ - "azure-core[aio]>=1.30.0", + "azure-core[aio]>=1.37.0", ], }, ) diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md index dd4fabfcdf42..a01f9a614515 100644 --- a/sdk/storage/azure-storage-blob/swagger/README.md +++ b/sdk/storage/azure-storage-blob/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-02-06/blob.json +input-file: 
https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-04-06/blob.json output-folder: ../azure/storage/blob/_generated namespace: azure.storage.blob no-namespace-folders: true diff --git a/sdk/storage/azure-storage-blob/tests/fake_credentials.py b/sdk/storage/azure-storage-blob/tests/fake_credentials.py index 0669d06ac152..bb3f5badeef7 100644 --- a/sdk/storage/azure-storage-blob/tests/fake_credentials.py +++ b/sdk/storage/azure-storage-blob/tests/fake_credentials.py @@ -6,3 +6,5 @@ CPK_KEY_VALUE = "MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3MDEyMzQ1Njc=" CPK_KEY_HASH = "3QFFFpRA5+XANHqwwbT4yXDmrT/2JaLt/FKHjzhOdoE=" +NEW_CPK_KEY_VALUE = "d8ZJUhe2xp+U3TnFoLuTW2k+L74Brz8HyxxRBPApO0w=" +NEW_CPK_KEY_HASH = "3bGYFD8lov2MugoticyOw+tMaGonjlGXUopX9PyPnSo=" diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob.py b/sdk/storage/azure-storage-blob/tests/test_common_blob.py index 17ea27de2141..074505aec045 100644 --- a/sdk/storage/azure-storage-blob/tests/test_common_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob.py @@ -11,6 +11,7 @@ from datetime import datetime, timedelta from enum import Enum from io import BytesIO +from urllib.parse import quote, urlencode from azure.mgmt.storage import StorageManagementClient @@ -23,7 +24,8 @@ HttpResponseError, ResourceExistsError, ResourceModifiedError, - ResourceNotFoundError) + ResourceNotFoundError +) from azure.core.pipeline.transport import RequestsTransport from azure.storage.blob import ( AccessPolicy, @@ -40,7 +42,6 @@ ImmutabilityPolicy, LinearRetry, ResourceTypes, - RetentionPolicy, Services, StandardBlobTier, StorageErrorCode, @@ -48,7 +49,8 @@ generate_account_sas, generate_blob_sas, generate_container_sas, - upload_blob_to_url) + upload_blob_to_url +) from azure.storage.blob._generated.models import RehydratePriority from devtools_testutils import FakeTokenCredential, recorded_by_proxy @@ -3596,65 +3598,44 @@ def 
test_upload_blob_partial_stream_chunked(self, **kwargs): result = blob.download_blob().readall() assert result == data[:length] - @pytest.mark.live_test_only @BlobPreparer() - def test_blob_user_delegation_oid(self, **kwargs): + @recorded_by_proxy + def test_delete_blob_access_tier_conditionals(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - token_credential = self.get_credential(BlobServiceClient) - data = b"abc123" - service = BlobServiceClient( - account_url=self.account_url(storage_account_name, "blob"), - credential=token_credential - ) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) - user_delegation_key = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) - token = token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + self._setup(storage_account_name, storage_account_key) - container_name = self.get_resource_name('oauthcontainer') - container = service.create_container(container_name) - blob = container.get_blob_client(self.get_resource_name('oauthblob')) - blob.upload_blob(data, length=len(data)) + early = self.get_datetime_variable(variables, 'early', datetime.utcnow()) - container_token = self.generate_sas( - generate_container_sas, - container.account_name, - container.container_name, - permission=ContainerSasPermissions(read=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - user_delegation_key=user_delegation_key, - user_delegation_oid=user_delegation_oid + if self.is_live: + self.sleep(10) + + blob1_name = self._create_block_blob() + blob1 = self.bsc.get_blob_client(self.container_name, blob1_name) + blob2_name = self._get_blob_reference() + "2" + 
blob2 = self.bsc.get_blob_client(self.container_name, blob2_name) + blob2.upload_blob( + self.byte_data, + length=len(self.byte_data), + standard_blob_tier=StandardBlobTier.COOL, + overwrite=True ) - assert "sduoid=" + user_delegation_oid in container_token + blob1.set_standard_blob_tier('Cool') + blob2.set_standard_blob_tier('Hot') - container_client = ContainerClient.from_container_url( - f"{container.url}?{container_token}", - credential=token_credential - ) - blobs_list = list(container_client.list_blobs()) - assert blobs_list is not None + late = self.get_datetime_variable(variables, 'late', datetime.utcnow() + timedelta(hours=1)) - blob_token = self.generate_sas( - generate_blob_sas, - blob.account_name, - blob.container_name, - blob.blob_name, - permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1), - user_delegation_key=user_delegation_key, - user_delegation_oid=user_delegation_oid - ) - assert "sduoid=" + user_delegation_oid in blob_token + with pytest.raises(HttpResponseError): + blob1.delete_blob(access_tier_if_modified_since=late) + resp = blob1.delete_blob(access_tier_if_modified_since=early) + assert resp is None - blob_client = BlobClient.from_blob_url( - f"{blob.url}?{blob_token}", - credential=token_credential - ) - content = blob_client.download_blob().readall() - assert content == data + with pytest.raises(HttpResponseError): + blob2.delete_blob(access_tier_if_unmodified_since=early) + resp = blob2.delete_blob(access_tier_if_unmodified_since=late) + assert resp is None return variables @@ -3707,4 +3688,127 @@ def test_download_blob_no_decompress_chunks(self, **kwargs): result = blob.download_blob(decompress=False).readall() assert result == compressed_data + @pytest.mark.live_test_only + @BlobPreparer() + def test_blob_dynamic_user_delegation_sas(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient) + service = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) + container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob') + container = service.create_container(container_name) + blob = container.get_blob_client(blob_name) + blob.upload_blob(b"abc") + + user_delegation_key = service.get_user_delegation_key( + key_start_time=datetime.utcnow(), + key_expiry_time=datetime.utcnow() + timedelta(hours=1), + ) + + request_headers = { + "foo$": "bar!", + "company": "msft", + "city": "redmond,atlanta,reston", + } + + request_query_params = { + "hello$": "world!", + "check": "spelling", + "firstName": "john,Tim", + } + + blob_token = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + user_delegation_key=user_delegation_key, + request_headers=request_headers, + request_query_params=request_query_params + ) + + def callback(request): + for k, v in request_headers.items(): + request.http_request.headers[k] = v + extra = urlencode(request_query_params, quote_via=quote, safe="") + request.http_request.url = request.http_request.url + "&" + extra + + identity_blob = BlobClient.from_blob_url(f"{blob.url}?{blob_token}") + props = identity_blob.get_blob_properties(raw_request_hook=callback) + assert props is not None + + @pytest.mark.live_test_only + @BlobPreparer() + def test_blob_cross_tenant_delegation_sas(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient) + service = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + start = datetime.utcnow() + expiry = datetime.utcnow() + timedelta(hours=1) + token = token_credential.get_token("https://storage.azure.com/.default") + decoded = jwt.decode(token.token, 
options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = service.get_user_delegation_key( + key_start_time=start, + key_expiry_time=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid + + container_name = self.get_resource_name('oauthcontainer') + container = service.create_container(container_name) + blob = container.get_blob_client(self.get_resource_name('oauthblob')) + data = b"abc123" + blob.upload_blob(data, length=len(data)) + + container_token = self.generate_sas( + generate_container_sas, + container.account_name, + container.container_name, + permission=ContainerSasPermissions(read=True, list=True), + expiry=expiry, + user_delegation_key=user_delegation_key, + user_delegation_oid=user_delegation_oid + ) + + assert "sduoid=" + user_delegation_oid in container_token + assert "skdutid=" + delegated_user_tid in container_token + + container_client = ContainerClient.from_container_url( + f"{container.url}?{container_token}", + credential=token_credential + ) + blobs_list = list(container_client.list_blobs()) + assert blobs_list is not None + + blob_token = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + permission=BlobSasPermissions(read=True), + expiry=expiry, + user_delegation_key=user_delegation_key, + user_delegation_oid=user_delegation_oid + ) + + assert "sduoid=" + user_delegation_oid in blob_token + assert "skdutid=" + delegated_user_tid in blob_token + + identity_blob = BlobClient.from_blob_url( + f"{blob.url}?{blob_token}", + credential=token_credential + ) + content = identity_blob.download_blob().readall() + assert content == data + # ------------------------------------------------------------------------------ \ No newline at end of file diff --git 
a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py index 161451f893dd..94ce920487f6 100644 --- a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py @@ -12,6 +12,7 @@ from datetime import datetime, timedelta from enum import Enum from io import BytesIO +from urllib.parse import quote, urlencode import aiohttp import pytest @@ -22,7 +23,9 @@ HttpResponseError, ResourceNotFoundError, ResourceExistsError, - ClientAuthenticationError, ResourceModifiedError) + ClientAuthenticationError, + ResourceModifiedError +) from azure.core.pipeline.transport import AioHttpTransport from azure.mgmt.storage.aio import StorageManagementClient from azure.storage.blob.aio import ( @@ -30,7 +33,8 @@ BlobServiceClient, ContainerClient, download_blob_from_url, - upload_blob_to_url) + upload_blob_to_url +) from azure.storage.blob import ( AccessPolicy, AccountSasPermissions, @@ -43,13 +47,13 @@ ImmutabilityPolicy, RehydratePriority, ResourceTypes, - RetentionPolicy, Services, StandardBlobTier, StorageErrorCode, generate_account_sas, generate_container_sas, - generate_blob_sas) + generate_blob_sas +) from devtools_testutils.fake_credentials_async import AsyncFakeCredential from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase @@ -3527,68 +3531,44 @@ async def test_upload_blob_partial_stream_chunked(self, **kwargs): result = await (await blob.download_blob()).readall() assert result == data[:length] - @pytest.mark.live_test_only @BlobPreparer() - async def test_blob_user_delegation_oid(self, **kwargs): + @recorded_by_proxy_async + async def test_delete_blob_access_tier_conditionals(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") variables = kwargs.pop("variables", {}) - 
token_credential = self.get_credential(BlobServiceClient, is_async=True) - data = b"abc123" - service = BlobServiceClient( - account_url=self.account_url(storage_account_name, "blob"), - credential=token_credential - ) - start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) - expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) - user_delegation_key = await service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) - token = await token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + await self._setup(storage_account_name, storage_account_key) - container_name = self.get_resource_name('oauthcontainer') - container = await service.create_container(container_name) - blob = container.get_blob_client(self.get_resource_name('oauthblob')) - await blob.upload_blob(BytesIO(data), length=len(data)) + early = self.get_datetime_variable(variables, 'early', datetime.utcnow()) - container_token = self.generate_sas( - generate_container_sas, - container.account_name, - container.container_name, - permission=ContainerSasPermissions(read=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - user_delegation_key=user_delegation_key, - user_delegation_oid=user_delegation_oid + if self.is_live: + self.sleep(10) + + blob1_name = await self._create_block_blob() + blob1 = self.bsc.get_blob_client(self.container_name, blob1_name) + blob2_name = self._get_blob_reference() + "2" + blob2 = self.bsc.get_blob_client(self.container_name, blob2_name) + await blob2.upload_blob( + self.byte_data, + length=len(self.byte_data), + standard_blob_tier=StandardBlobTier.COOL, + overwrite=True ) - assert "sduoid=" + user_delegation_oid in container_token + await blob1.set_standard_blob_tier('Cool') + await blob2.set_standard_blob_tier('Hot') - container_client = ContainerClient.from_container_url( - 
f"{container.url}?{container_token}", - credential=token_credential - ) + late = self.get_datetime_variable(variables, 'late', datetime.utcnow() + timedelta(hours=1)) - blobs_list = [] - async for b in container_client.list_blobs(): - blobs_list.append(b) - assert blobs_list is not None - - sas_token = self.generate_sas( - generate_blob_sas, - blob.account_name, - blob.container_name, - blob.blob_name, - permission=BlobSasPermissions(read=True), - expiry=datetime.utcnow() + timedelta(hours=1), - user_delegation_key=user_delegation_key, - user_delegation_oid=user_delegation_oid - ) - assert "sduoid=" + user_delegation_oid in sas_token + with pytest.raises(HttpResponseError): + await blob1.delete_blob(access_tier_if_modified_since=late) + resp = await blob1.delete_blob(access_tier_if_modified_since=early) + assert resp is None - blob_client = BlobClient.from_blob_url( - f"{blob.url}?{sas_token}", - credential=token_credential - ) - content = await (await blob_client.download_blob()).readall() - assert content == data + with pytest.raises(HttpResponseError): + await blob2.delete_blob(access_tier_if_unmodified_since=early) + resp = await blob2.delete_blob(access_tier_if_unmodified_since=late) + assert resp is None return variables @@ -3643,4 +3623,130 @@ async def test_download_blob_no_decompress_chunks(self, **kwargs): result = await (await blob.download_blob(decompress=False)).readall() assert result == compressed_data + @pytest.mark.live_test_only + @BlobPreparer() + async def test_blob_dynamic_user_delegation_sas(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) + container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob') + container = await service.create_container(container_name) + blob = 
container.get_blob_client(blob_name) + await blob.upload_blob(b"abc") + + user_delegation_key = await service.get_user_delegation_key( + key_start_time=datetime.utcnow(), + key_expiry_time=datetime.utcnow() + timedelta(hours=1), + ) + + request_headers = { + "foo$": "bar!", + "company": "msft", + "city": "redmond,atlanta,reston", + } + + request_query_params = { + "hello$": "world!", + "check": "spelling", + "firstName": "john,Tim", + } + + blob_token = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + user_delegation_key=user_delegation_key, + request_headers=request_headers, + request_query_params=request_query_params + ) + + def callback(request): + for k, v in request_headers.items(): + request.http_request.headers[k] = v + extra = urlencode(request_query_params, quote_via=quote, safe="") + request.http_request.url = request.http_request.url + "&" + extra + + identity_blob = BlobClient.from_blob_url(f"{blob.url}?{blob_token}") + props = await identity_blob.get_blob_properties(raw_request_hook=callback) + assert props is not None + + @pytest.mark.live_test_only + @BlobPreparer() + async def test_blob_cross_tenant_delegation_sas(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + service = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + start = datetime.utcnow() + expiry = datetime.utcnow() + timedelta(hours=1) + token = await token_credential.get_token("https://storage.azure.com/.default") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = await service.get_user_delegation_key( + key_start_time=start, + 
key_expiry_time=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid + + container_name = self.get_resource_name('oauthcontainer') + container = await service.create_container(container_name) + blob = container.get_blob_client(self.get_resource_name('oauthblob')) + data = b"abc123" + await blob.upload_blob(data, length=len(data)) + + container_token = self.generate_sas( + generate_container_sas, + container.account_name, + container.container_name, + permission=ContainerSasPermissions(read=True, list=True), + expiry=expiry, + user_delegation_key=user_delegation_key, + user_delegation_oid=user_delegation_oid + ) + + assert "sduoid=" + user_delegation_oid in container_token + assert "skdutid=" + delegated_user_tid in container_token + + container_client = ContainerClient.from_container_url( + f"{container.url}?{container_token}", + credential=token_credential + ) + + blobs_list = [] + async for b in container_client.list_blobs(): + blobs_list.append(b) + assert blobs_list is not None + + blob_token = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + permission=BlobSasPermissions(read=True), + expiry=expiry, + user_delegation_key=user_delegation_key, + user_delegation_oid=user_delegation_oid + ) + + assert "sduoid=" + user_delegation_oid in blob_token + assert "skdutid=" + delegated_user_tid in blob_token + + identity_blob = BlobClient.from_blob_url( + f"{blob.url}?{blob_token}", + credential=token_credential + ) + content = await (await identity_blob.download_blob()).readall() + assert content == data + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk.py b/sdk/storage/azure-storage-blob/tests/test_cpk.py index fea8c358689c..8752a6293135 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk.py +++ 
b/sdk/storage/azure-storage-blob/tests/test_cpk.py @@ -19,11 +19,12 @@ from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase -from fake_credentials import CPK_KEY_HASH, CPK_KEY_VALUE +from fake_credentials import CPK_KEY_HASH, CPK_KEY_VALUE, NEW_CPK_KEY_HASH, NEW_CPK_KEY_VALUE from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) +NEW_TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=NEW_CPK_KEY_VALUE, key_hash=NEW_CPK_KEY_HASH) # ------------------------------------------------------------------------------ @@ -685,5 +686,196 @@ def test_snapshot_blob(self, **kwargs): assert blob_snapshot is not None self._teardown(bsc) + @BlobPreparer() + @recorded_by_proxy + def test_append_block_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + source_blob_client.upload_blob(self.byte_data, blob_type=BlobType.APPENDBLOB, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client = self._create_append_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = destination_blob_client.append_block_from_url( + source_blob_url, + source_offset=0, + source_length=len(self.byte_data), + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy + def test_upload_blob_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + source_blob_client.upload_blob(self.byte_data, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client, _ = self._create_block_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = destination_blob_client.upload_blob_from_url( + source_blob_url, + overwrite=True, + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy + def test_stage_block_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + source_blob_client.upload_blob(self.byte_data, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client, _ = self._create_block_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + block_id = '1' + props = destination_blob_client.stage_block_from_url( + block_id, + source_blob_url, + source_offset=0, + source_length=len(self.byte_data), + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy + def test_upload_pages_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + source_blob_client.upload_blob(self.byte_data, blob_type=BlobType.PAGEBLOB, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client = self._create_page_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = destination_blob_client.upload_pages_from_url( + source_blob_url, + offset=0, + length=len(self.byte_data), + source_offset=0, + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_async.py b/sdk/storage/azure-storage-blob/tests/test_cpk_async.py index f5cd9ece8ed8..cdb7eb4bec21 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_async.py @@ -20,11 +20,12 @@ from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase -from fake_credentials import CPK_KEY_HASH, CPK_KEY_VALUE +from fake_credentials import CPK_KEY_HASH, CPK_KEY_VALUE, NEW_CPK_KEY_HASH, NEW_CPK_KEY_VALUE from settings.testcase import BlobPreparer # ------------------------------------------------------------------------------ TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) +NEW_TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=NEW_CPK_KEY_VALUE, key_hash=NEW_CPK_KEY_HASH) # ------------------------------------------------------------------------------ @@ -660,3 +661,195 @@ async def test_snapshot_blob(self, **kwargs): # Assert assert blob_snapshot is not None + + @BlobPreparer() + @recorded_by_proxy_async + async def test_append_block_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + 
storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + await self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + await source_blob_client.upload_blob(self.byte_data, blob_type=BlobType.APPENDBLOB, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" + source_blob_sas + + destination_blob_client = await self._create_append_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = await destination_blob_client.append_block_from_url( + source_blob_url, + source_offset=0, + source_length=len(self.byte_data), + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_upload_blob_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + await self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + await source_blob_client.upload_blob(self.byte_data, cpk=TEST_ENCRYPTION_KEY) + 
source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" + source_blob_sas + + destination_blob_client, _ = await self._create_block_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = await destination_blob_client.upload_blob_from_url( + source_blob_url, + overwrite=True, + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_stage_block_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + await self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + await source_blob_client.upload_blob(self.byte_data, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client, _ = await self._create_block_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + block_id = '1' + props = await destination_blob_client.stage_block_from_url( + block_id, + source_blob_url, + source_offset=0, + source_length=len(self.byte_data), + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_upload_pages_from_url_with_rekeying(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key.secret + ) + await self._setup(bsc) + + source_blob_client = bsc.get_blob_client(self.container_name, self.get_resource_name("sourceblob")) + await source_blob_client.upload_blob(self.byte_data, blob_type=BlobType.PAGEBLOB, cpk=TEST_ENCRYPTION_KEY) + source_blob_sas = self.generate_sas( + generate_blob_sas, + source_blob_client.account_name, + source_blob_client.container_name, + source_blob_client.blob_name, + account_key=source_blob_client.credential.account_key, + permission=BlobSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1) + ) + source_blob_url = source_blob_client.url + "?" 
+ source_blob_sas + + destination_blob_client = await self._create_page_blob(bsc, cpk=NEW_TEST_ENCRYPTION_KEY) + + # Act + props = await destination_blob_client.upload_pages_from_url( + source_blob_url, + offset=0, + length=len(self.byte_data), + source_offset=0, + cpk=NEW_TEST_ENCRYPTION_KEY, + source_cpk=TEST_ENCRYPTION_KEY + ) + + # Assert + assert props is not None + assert props['etag'] is not None + assert props['last_modified'] is not None + assert props['request_server_encrypted'] + + if self.is_live: + assert props['encryption_key_sha256'] == NEW_TEST_ENCRYPTION_KEY.key_hash + + self._teardown(bsc) diff --git a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md index 7bc5f98c2df6..c394591de45e 100644 --- a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md @@ -1,8 +1,22 @@ # Release History -## 12.24.0b1 (Unreleased) +## 12.24.0b1 (2026-01-27) ### Features Added +- Added support for service version 2026-04-06. +- Added support for the keyword `user_delegation_tid` to `DataLakeServiceClient.get_user_delegation_key` +API, which can be used in `generate_file_system_sas`, `generate_directory_sas`, and `generate_file_sas` +to specify the Tenant ID that is authorized to use the generated SAS URL. Note that `user_delegation_tid` +must be used together with `user_delegation_oid`. +- Added support for the keyword `request_headers` to `generate_file_system_sas`, `generate_directory_sas`, +and `generate_file_sas` which specifies a set of headers and their corresponding values that +must be present in the request header when using the generated SAS. +- Added support for the keyword `request_query_params` to `generate_file_system_sas`, `generate_directory_sas`, +and `generate_file_sas` which specifies a set of query parameters and their corresponding values that +must be present in the request URL when using the generated SAS. 
+ +### Other Changes +- Bumped minimum `azure-core` dependency to 1.37.0. ## 12.23.0 (2026-01-06) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 9c417c8d6f09..6641ea74f21f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -382,7 +382,7 @@ def set_file_expiry( @distributed_trace def upload_data( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, **kwargs: Any @@ -391,7 +391,7 @@ def upload_data( Upload data to a file. :param data: Content to be uploaded to file - :type data: bytes, str, Iterable[AnyStr], or IO[AnyStr] + :type data: Union[bytes, str, Iterable[AnyStr], IO[bytes]] :param int length: Size of the data in bytes. :param bool overwrite: to overwrite an existing file or not. :keyword ~azure.storage.filedatalake.ContentSettings content_settings: @@ -481,7 +481,7 @@ def upload_data( @distributed_trace def append_data( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, Iterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, **kwargs: Any @@ -489,9 +489,11 @@ def append_data( """Append data to the file. :param data: Content to be appended to file - :type data: bytes, str, Iterable[AnyStr], or IO[AnyStr] + :type data: Union[bytes, Iterable[bytes], IO[bytes]] :param int offset: start position of the data to be appended to. - :param length: Size of the data in bytes. + :param length: + Size of the data to append. Optional if the length of data can be determined. 
For Iterable and IO, + if the length is not provided and cannot be determined, all data will be read into memory. :type length: int or None :keyword bool flush: If true, will commit the data after it is appended. diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.pyi index a1157de976fd..c7e7594894b3 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.pyi @@ -136,7 +136,7 @@ class DataLakeFileClient(PathClient): @distributed_trace def upload_data( self, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, *, @@ -161,7 +161,7 @@ class DataLakeFileClient(PathClient): @distributed_trace def append_data( self, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, *, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py index f40f27e6cbd6..6ce18867b4e5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py @@ -94,7 +94,7 @@ def _flush_data_options( def _upload_options( - data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], scheme: str, config: "StorageConfiguration", path: "PathOperations", diff --git 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py index 66a91633f68c..b30a532af67e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py @@ -195,6 +195,8 @@ def from_connection_string( def get_user_delegation_key( self, key_start_time: "datetime", key_expiry_time: "datetime", + *, + delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> UserDelegationKey: """ @@ -205,6 +207,7 @@ def get_user_delegation_key( A DateTime value. Indicates when the key becomes valid. :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. 
@@ -226,6 +229,7 @@ def get_user_delegation_key( delegation_key = self._blob_service_client.get_user_delegation_key( key_start_time=key_start_time, key_expiry_time=key_expiry_time, + delegated_user_tid=delegated_user_tid, **kwargs ) return UserDelegationKey._from_generated(delegation_key) # pylint: disable=protected-access diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.pyi index 80682f4ddc58..bdd196d3bdf1 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.pyi @@ -69,7 +69,13 @@ class DataLakeServiceClient(StorageAccountHostsMixin): ) -> Self: ... @distributed_trace def get_user_delegation_key( - self, key_start_time: datetime, key_expiry_time: datetime, *, timeout: Optional[int] = None, **kwargs: Any + self, + key_start_time: datetime, + key_expiry_time: datetime, + *, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any ) -> UserDelegationKey: ... @distributed_trace def list_file_systems( diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py index ff543ed937ff..6da830e0cf4a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py index 506fbc5fcdac..bb1ce5ce7e4e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py @@ -69,7 +69,7 @@ def __init__(self, *args, **kwargs) -> None: 
self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, @@ -329,7 +329,7 @@ async def create( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def update( + async def update( # pylint: disable=too-many-locals self, action: Union[str, _models.PathUpdateAction], mode: Union[str, _models.PathSetAccessControlRecursiveMode], @@ -1463,7 +1463,7 @@ async def set_access_control_recursive( return deserialized # type: ignore @distributed_trace_async - async def flush_data( + async def flush_data( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, position: Optional[int] = None, @@ -1662,7 +1662,7 @@ async def flush_data( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def append_data( + async def append_data( # pylint: disable=too-many-locals self, body: IO[bytes], position: Optional[int] = None, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py index 9175e964f819..ccff697bed0e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py @@ -37,7 +37,7 @@ _SERIALIZER.client_side_validation = False -def build_create_request( +def build_create_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, request_id_parameter: Optional[str] = None, @@ -176,7 +176,7 @@ def build_create_request( return HttpRequest(method="PUT", url=_url, params=_params, 
headers=_headers, **kwargs) -def build_update_request( +def build_update_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, action: Union[str, _models.PathUpdateAction], @@ -653,7 +653,7 @@ def build_set_access_control_recursive_request( # pylint: disable=name-too-long return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_flush_data_request( +def build_flush_data_request( # pylint: disable=too-many-locals url: str, *, timeout: Optional[int] = None, @@ -754,7 +754,7 @@ def build_flush_data_request( return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_append_data_request( +def build_append_data_request( # pylint: disable=too-many-locals url: str, *, content: IO[bytes], @@ -946,7 +946,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: disable=inconsistent-return-statements,too-many-locals self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, @@ -1206,7 +1206,7 @@ def create( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def update( + def update( # pylint: disable=too-many-locals self, action: Union[str, _models.PathUpdateAction], mode: Union[str, _models.PathSetAccessControlRecursiveMode], @@ -2340,7 +2340,7 @@ def set_access_control_recursive( return deserialized # type: ignore @distributed_trace - def flush_data( # pylint: disable=inconsistent-return-statements + def flush_data( # pylint: disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, position: Optional[int] = None, @@ -2539,7 +2539,7 @@ def flush_data( # pylint: disable=inconsistent-return-statements return 
cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def append_data( # pylint: disable=inconsistent-return-statements + def append_data( # pylint: disable=inconsistent-return-statements,too-many-locals self, body: IO[bytes], position: Optional[int] = None, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index 0f74d17ed6db..ee3ee10f5080 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -999,6 +999,7 @@ def _from_generated(cls, generated): delegation_key = cls() delegation_key.signed_oid = generated.signed_oid delegation_key.signed_tid = generated.signed_tid + delegation_key.signed_delegated_user_tid = generated.signed_delegated_user_tid delegation_key.signed_start = generated.signed_start delegation_key.signed_expiry = generated.signed_expiry delegation_key.signed_service = generated.signed_service diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index 8ad645c6f088..d52a4c8ca621 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -57,6 +57,7 @@ '2025-07-05', '2025-11-05', '2026-02-06', + '2026-04-06', ] # This list must be in chronological order! 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py index 804260f09e83..ab5bc332c421 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py @@ -89,6 +89,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED instead. INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. 
INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" @@ -153,11 +155,15 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # File values CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" DELETE_PENDING = "DeletePending" DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" FILE_LOCK_CONFLICT = "FileLockConflict" FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_BANDWIDTH_INVALID = "FileShareProvisionedBandwidthInvalid" FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_INVALID = "FileShareProvisionedIopsInvalid" + FILE_SHARE_PROVISIONED_STORAGE_INVALID = "FileShareProvisionedStorageInvalid" INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" PARENT_NOT_FOUND = "ParentNotFound" READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" @@ -171,7 +177,10 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): SHARE_SNAPSHOT_NOT_FOUND = "ShareSnapshotNotFound" SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" - CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" + TOTAL_SHARES_PROVISIONED_CAPACITY_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedCapacityExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_IOPS_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedIopsExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_BANDWIDTH_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedBandwidthExceedsAccountLimit" + TOTAL_SHARES_COUNT_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesCountExceedsAccountLimit" # DataLake values CONTENT_LENGTH_MUST_BE_ZERO = "ContentLengthMustBeZero" @@ -517,6 +526,8 @@ class UserDelegationKey(object): 
"""Object ID of this token.""" signed_tid: Optional[str] = None """Tenant ID of the tenant that issued this token.""" + signed_delegated_user_tid: Optional[str] = None + """User Tenant ID of this token.""" signed_start: Optional[str] = None """The datetime this token becomes valid.""" signed_expiry: Optional[str] = None @@ -531,6 +542,7 @@ class UserDelegationKey(object): def __init__(self): self.signed_oid = None self.signed_tid = None + self.signed_delegated_user_tid = None self.signed_start = None self.signed_expiry = None self.signed_service = None diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py index 750838e3129a..9a079c56404f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py @@ -201,6 +201,7 @@ def parse_to_internal_user_delegation_key(service_user_delegation_key): internal_user_delegation_key = UserDelegationKey() internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_delegated_user_tid = service_user_delegation_key.signed_delegated_user_tid internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/shared_access_signature.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/shared_access_signature.py index 6556a066dde2..0e886f2bb35a 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/shared_access_signature.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/shared_access_signature.py @@ -42,6 +42,8 @@ class QueryStringConstants(object): SIGNED_KEY_SERVICE = "sks" SIGNED_KEY_VERSION = "skv" SIGNED_ENCRYPTION_SCOPE = "ses" + SIGNED_REQUEST_HEADERS = "srh" + SIGNED_REQUEST_QUERY_PARAMS = "srq" SIGNED_KEY_DELEGATED_USER_TID = "skdutid" SIGNED_DELEGATED_USER_OID = "sduoid" @@ -81,6 +83,8 @@ def to_list(): QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, + QueryStringConstants.SIGNED_REQUEST_HEADERS, + QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, QueryStringConstants.SIGNED_DELEGATED_USER_OID, # for ADLS @@ -182,6 +186,10 @@ def __init__(self): self.query_dict = {} self.string_to_sign = "" + # STS-only values for dynamic user delegation SAS + self._sts_srh = "" # newline-delimited "k:v" + trailing newline (or empty) + self._sts_srq = "" # newline-delimited "k:v" + leading newline (or empty) + def _add_query(self, name, val): if val: self.query_dict[name] = str(val) if val is not None else None @@ -225,6 +233,28 @@ def add_override_response_headers( self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + def add_request_headers(self, request_headers): + if not request_headers: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srh = "\n".join([f"{k}:{v}" for k, v in request_headers.items()]) + "\n" + + # SAS query param: comma-separated list of encoded header keys only + srh_keys = ",".join([url_quote(k) for k in request_headers.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_HEADERS, srh_keys) + + def add_request_query_params(self, request_query_params): + if not 
request_query_params: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srq = "\n" + "\n".join([f"{k}:{v}" for k, v in request_query_params.items()]) + + # SAS query param: comma-separated list of encoded query-param keys only + srq_keys = ",".join([url_quote(k) for k in request_query_params.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, srq_keys) + def add_account_signature(self, account_name, account_key): def get_value_to_append(query): return_value = self.query_dict.get(query) or "" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py index 32b99fb0b43d..506721dd69ab 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py @@ -6,7 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from typing import ( - Any, Callable, cast, Optional, Union, + Any, Callable, cast, Dict, Optional, Union, TYPE_CHECKING ) from urllib.parse import parse_qs @@ -109,6 +109,8 @@ def generate_file_system_sas( expiry: Optional[Union["datetime", str]] = None, *, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -198,6 +200,12 @@ def generate_file_system_sas( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :keyword Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. 
+ :keyword Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. :keyword sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. @@ -213,6 +221,8 @@ def generate_file_system_sas( permission=cast(Optional[Union["ContainerSasPermissions", str]], permission), expiry=expiry, user_delegation_oid=user_delegation_oid, + request_headers=request_headers, + request_query_params=request_query_params, sts_hook=sts_hook, **kwargs ) @@ -227,6 +237,8 @@ def generate_directory_sas( expiry: Optional[Union["datetime", str]] = None, *, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -318,6 +330,12 @@ def generate_directory_sas( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :keyword Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. + :keyword Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. :keyword sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. 
@@ -337,6 +355,8 @@ def generate_directory_sas( sdd=depth, is_directory=True, user_delegation_oid=user_delegation_oid, + request_headers=request_headers, + request_query_params=request_query_params, sts_hook=sts_hook, **kwargs ) @@ -352,6 +372,8 @@ def generate_file_sas( expiry: Optional[Union["datetime", str]] = None, *, user_delegation_oid: Optional[str] = None, + request_headers: Optional[Dict[str, str]] = None, + request_query_params: Optional[Dict[str, str]] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -445,6 +467,12 @@ def generate_file_sas( Specifies the Entra ID of the user that is authorized to use the resulting SAS URL. The resulting SAS URL must be used in conjunction with an Entra ID token that has been issued to the user specified in this value. + :keyword Dict[str, str] request_headers: + Specifies a set of headers and their corresponding values that + must be present in the request when using this SAS. + :keyword Dict[str, str] request_query_params: + Specifies a set of query parameters and their corresponding values that + must be present in the request when using this SAS. :keyword sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. 
@@ -464,8 +492,10 @@ def generate_file_sas( user_delegation_key=credential if not isinstance(credential, str) else None, permission=cast(Optional[Union["BlobSasPermissions", str]], permission), expiry=expiry, - sts_hook=sts_hook, user_delegation_oid=user_delegation_oid, + request_headers=request_headers, + request_query_params=request_query_params, + sts_hook=sts_hook, **kwargs ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index b6e2cc192134..1edf3832a690 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ( - Any, AnyStr, AsyncIterable, cast, Dict, IO, Optional, Union, + Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING ) from urllib.parse import quote, unquote @@ -398,7 +398,7 @@ async def set_file_expiry( @distributed_trace_async async def upload_data( - self, data: Union[bytes, str, AsyncIterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, **kwargs: Any @@ -407,7 +407,7 @@ async def upload_data( Upload data to a file. :param data: Content to be uploaded to file - :type data: bytes, str, AsyncIterable[AnyStr], or IO[AnyStr] + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]] :param int length: Size of the data in bytes. :param bool overwrite: to overwrite an existing file or not. 
:keyword ~azure.storage.filedatalake.ContentSettings content_settings: @@ -497,7 +497,7 @@ async def upload_data( @distributed_trace_async async def append_data( - self, data: Union[bytes, str, AsyncIterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, **kwargs: Any @@ -505,9 +505,11 @@ async def append_data( """Append data to the file. :param data: Content to be appended to file - :type data: bytes, str, AsyncIterable[AnyStr], or IO[AnyStr] + :type data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]] :param int offset: start position of the data to be appended to. - :param length: Size of the data in bytes. + :param length: + Size of the data to append. Optional if the length of data can be determined. For Iterable and IO, + if the length is not provided and cannot be determined, all data will be read into memory. :type length: int or None :keyword bool flush: If true, will commit the data after it is appended. 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.pyi index a85ce6c5ef22..8da588a32f51 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.pyi @@ -14,6 +14,7 @@ from typing import ( Callable, Dict, IO, + Iterable, Literal, Optional, Union, @@ -134,7 +135,7 @@ class DataLakeFileClient(PathClient): @distributed_trace_async async def upload_data( self, - data: Union[bytes, str, AsyncIterable[AnyStr], IO[AnyStr]], + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, *, @@ -159,7 +160,7 @@ class DataLakeFileClient(PathClient): @distributed_trace_async async def append_data( self, - data: Union[bytes, str, AsyncIterable[AnyStr], IO[AnyStr]], + data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, *, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py index 0377dcbd15e0..5ff3b28659a3 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py @@ -201,6 +201,8 @@ def from_connection_string( async def get_user_delegation_key( self, key_start_time: "datetime", key_expiry_time: "datetime", + *, + delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> UserDelegationKey: """ @@ -211,6 +213,7 @@ async def get_user_delegation_key( A DateTime value. 
Indicates when the key becomes valid. :param ~datetime.datetime key_expiry_time: A DateTime value. Indicates when the key stops being valid. + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -232,6 +235,7 @@ async def get_user_delegation_key( delegation_key = await self._blob_service_client.get_user_delegation_key( key_start_time=key_start_time, key_expiry_time=key_expiry_time, + delegated_user_tid=delegated_user_tid, **kwargs ) return UserDelegationKey._from_generated(delegation_key) # pylint: disable=protected-access diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.pyi index d1a00b4397a3..63f811a30574 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.pyi @@ -75,7 +75,13 @@ class DataLakeServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMi ) -> Self: ... @distributed_trace_async async def get_user_delegation_key( - self, key_start_time: datetime, key_expiry_time: datetime, *, timeout: Optional[int] = None, **kwargs: Any + self, + key_start_time: datetime, + key_expiry_time: datetime, + *, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any ) -> UserDelegationKey: ... 
@distributed_trace def list_file_systems( diff --git a/sdk/storage/azure-storage-file-datalake/setup.py b/sdk/storage/azure-storage-file-datalake/setup.py index 7642733c85aa..4da70e2ca0ca 100644 --- a/sdk/storage/azure-storage-file-datalake/setup.py +++ b/sdk/storage/azure-storage-file-datalake/setup.py @@ -78,14 +78,14 @@ ]), python_requires=">=3.9", install_requires=[ - "azure-core>=1.30.0", + "azure-core>=1.37.0", "azure-storage-blob>=12.29.0b1", "typing-extensions>=4.6.0", "isodate>=0.6.1" ], extras_require={ "aio": [ - "azure-core[aio]>=1.30.0", + "azure-core[aio]>=1.37.0", ], }, ) diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index 8dc309f0a009..e3b39f23b7e9 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -7,6 +7,7 @@ import unittest from datetime import datetime, timedelta from math import ceil +from urllib.parse import quote, urlencode import pytest from azure.core import MatchConditions @@ -1779,6 +1780,62 @@ def test_download_file_no_decompress_chunks(self, **kwargs): result = file_client.download_file(decompress=False).readall() assert result == compressed_data + @pytest.mark.live_test_only + @DataLakePreparer() + def test_datalake_dynamic_user_delegation_sas(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + + token_credential = self.get_credential(DataLakeServiceClient) + dsc = DataLakeServiceClient(self.account_url(datalake_storage_account_name, "dfs"), credential=token_credential) + fs_name, file_name = self.get_resource_name('filesystem'), self.get_resource_name('file') + fs = dsc.create_file_system(fs_name) + file = fs.create_file(file_name) + file.upload_data(b"abc", overwrite=True) + + user_delegation_key = dsc.get_user_delegation_key( + key_start_time=datetime.utcnow(), + key_expiry_time=datetime.utcnow() + 
timedelta(hours=1), + ) + + request_headers = { + "foo$": "bar!", + "company": "msft", + "city": "redmond,atlanta,reston", + } + + request_query_params = { + "hello$": "world!", + "check": "spelling", + "firstName": "john,Tim", + } + + sas_token = generate_file_sas( + file.account_name, + file.file_system_name, + None, + file.path_name, + user_delegation_key, + permission=FileSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + request_headers=request_headers, + request_query_params=request_query_params + ) + + def callback(request): + for k, v in request_headers.items(): + request.http_request.headers[k] = v + extra = urlencode(request_query_params, quote_via=quote, safe="") + request.http_request.url = request.http_request.url + "&" + extra + + identity_file = DataLakeFileClient( + self.account_url(datalake_storage_account_name, 'dfs'), + file.file_system_name, + file.path_name, + credential=sas_token + ) + props = identity_file.get_file_properties(raw_request_hook=callback) + assert props is not None + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index 9d21e945d4bf..b0c136501212 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -8,6 +8,7 @@ import unittest from datetime import datetime, timedelta from math import ceil +from urllib.parse import quote, urlencode import pytest from azure.core import MatchConditions @@ -1683,6 +1684,62 @@ async def test_download_file_no_decompress_chunks(self, **kwargs): result = await (await file_client.download_file(decompress=False)).readall() assert result == compressed_data + @pytest.mark.live_test_only + @DataLakePreparer() + async def test_datalake_dynamic_user_delegation_sas(self, 
**kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + + token_credential = self.get_credential(DataLakeServiceClient, is_async=True) + dsc = DataLakeServiceClient(self.account_url(datalake_storage_account_name, "dfs"), credential=token_credential) + fs_name, file_name = self.get_resource_name('filesystem'), self.get_resource_name('file') + fs = await dsc.create_file_system(fs_name) + file = await fs.create_file(file_name) + await file.upload_data(b"abc", overwrite=True) + + user_delegation_key = await dsc.get_user_delegation_key( + key_start_time=datetime.utcnow(), + key_expiry_time=datetime.utcnow() + timedelta(hours=1), + ) + + request_headers = { + "foo$": "bar!", + "company": "msft", + "city": "redmond,atlanta,reston", + } + + request_query_params = { + "hello$": "world!", + "check": "spelling", + "firstName": "john,Tim", + } + + sas_token = generate_file_sas( + file.account_name, + file.file_system_name, + None, + file.path_name, + user_delegation_key, + permission=FileSasPermissions(read=True), + expiry=datetime.utcnow() + timedelta(hours=1), + request_headers=request_headers, + request_query_params=request_query_params + ) + + def callback(request): + for k, v in request_headers.items(): + request.http_request.headers[k] = v + extra = urlencode(request_query_params, quote_via=quote, safe="") + request.http_request.url = request.http_request.url + "&" + extra + + identity_file = DataLakeFileClient( + self.account_url(datalake_storage_account_name, 'dfs'), + file.file_system_name, + file.path_name, + credential=sas_token + ) + props = await identity_file.get_file_properties(raw_request_hook=callback) + assert props is not None + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py index 83fa05b5d886..b82cba4cfe35 
100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py @@ -1212,9 +1212,8 @@ def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @DataLakePreparer() - def test_datalake_user_delegation_oid(self, **kwargs): + def test_datalake_cross_tenant_delegation_sas(self, **kwargs): datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") - data = b"abc123" token_credential = self.get_credential(DataLakeServiceClient) account_url = self.account_url(datalake_storage_account_name, "dfs") @@ -1225,13 +1224,23 @@ def test_datalake_user_delegation_oid(self, **kwargs): directory = file_system.create_directory(directory_name) file_name = "file" file = directory.create_file(file_name) + data = b"abc123" file.upload_data(data, length=len(data), overwrite=True) start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = dsc.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) token = token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = dsc.get_user_delegation_key( + key_start_time=start, + key_expiry_time=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid file_system_token = self.generate_sas( generate_file_system_sas, @@ -1242,6 +1251,10 @@ def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid, ) + + assert "sduoid=" + user_delegation_oid in file_system_token + assert "skdutid=" + delegated_user_tid in file_system_token + 
file_system_client = FileSystemClient( f"{account_url}?{file_system_token}", file_system_name=file_system_name, @@ -1262,6 +1275,10 @@ def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid ) + + assert "sduoid=" + user_delegation_oid in directory_token + assert "skdutid=" + delegated_user_tid in directory_token + directory_client = DataLakeDirectoryClient( f"{account_url}?{directory_token}", file_system_name=file_system_name, @@ -1282,6 +1299,10 @@ def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid ) + + assert "sduoid=" + user_delegation_oid in file_token + assert "skdutid=" + delegated_user_tid in file_token + file_client = DataLakeFileClient( f"{account_url}?{file_token}", file_system_name=file_system_name, diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py index aefc31bc13da..cdd419b30989 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py @@ -1344,9 +1344,8 @@ async def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @DataLakePreparer() - async def test_datalake_user_delegation_oid(self, **kwargs): + async def test_datalake_cross_tenant_delegation_sas(self, **kwargs): datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") - data = b"abc123" token_credential = self.get_credential(DataLakeServiceClient, is_async=True) account_url = self.account_url(datalake_storage_account_name, "dfs") @@ -1357,13 +1356,23 @@ async def test_datalake_user_delegation_oid(self, **kwargs): directory = await file_system.create_directory(directory_name) file_name = "file" file = await directory.create_file(file_name) + data = b"abc123" await file.upload_data(data, length=len(data), overwrite=True) start = 
datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = await dsc.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) token = await token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = await dsc.get_user_delegation_key( + key_start_time=start, + key_expiry_time=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid file_system_token = self.generate_sas( generate_file_system_sas, @@ -1374,13 +1383,18 @@ async def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid, ) + + assert "sduoid=" + user_delegation_oid in file_system_token + assert "skdutid=" + delegated_user_tid in file_system_token + file_system_client = FileSystemClient( f"{account_url}?{file_system_token}", file_system_name=file_system_name, credential=token_credential ) + paths = [] - async for path in file_system_client.get_paths() : + async for path in file_system_client.get_paths(): paths.append(path) assert len(paths) == 2 @@ -1397,6 +1411,10 @@ async def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid ) + + assert "sduoid=" + user_delegation_oid in directory_token + assert "skdutid=" + delegated_user_tid in directory_token + directory_client = DataLakeDirectoryClient( f"{account_url}?{directory_token}", file_system_name=file_system_name, @@ -1417,6 +1435,10 @@ async def test_datalake_user_delegation_oid(self, **kwargs): expiry=expiry, user_delegation_oid=user_delegation_oid ) + + assert "sduoid=" + user_delegation_oid in file_token + 
assert "skdutid=" + delegated_user_tid in file_token + file_client = DataLakeFileClient( f"{account_url}?{file_token}", file_system_name=file_system_name, diff --git a/sdk/storage/azure-storage-file-share/CHANGELOG.md b/sdk/storage/azure-storage-file-share/CHANGELOG.md index 27c0c5ac3e30..948dba1e270d 100644 --- a/sdk/storage/azure-storage-file-share/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-share/CHANGELOG.md @@ -1,8 +1,16 @@ # Release History -## 12.25.0b1 (Unreleased) +## 12.25.0b1 (2026-01-27) ### Features Added +- Added support for service version 2026-04-06. +- Added support for improved error handling for file share provisioning. +- Added support for the keyword `user_delegation_tid` to `ShareServiceClient.get_user_delegation_key` API, which +can be used in `generate_share_sas` and `generate_file_sas` to specify the Tenant ID that is authorized +to use the generated SAS URL. Note that `user_delegation_tid` must be used together with `user_delegation_oid`. + +### Other Changes +- Bumped minimum `azure-core` dependency to 1.37.0. 
## 12.24.0 (2026-01-06) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py index 85af4243b8f7..223b7fbf6360 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py @@ -131,11 +131,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) def __enter__(self) -> Self: self._client.__enter__() diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py index 367cf45be2d0..ed032562a768 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py @@ -6,12 +6,11 @@ # pylint: disable=docstring-keyword-should-match-keyword-only, too-many-lines, too-many-public-methods import functools -import sys import time from datetime import datetime from io import BytesIO from typing import ( - Any, AnyStr, Callable, cast, Dict, IO, Iterable, List, Optional, Tuple, 
Union, + Any, AnyStr, Callable, cast, Dict, IO, Iterable, List, Literal, Optional, Tuple, Union, TYPE_CHECKING ) from typing_extensions import Self @@ -46,11 +45,6 @@ from ._shared.response_handlers import return_response_headers, process_storage_error from ._shared.uploads import IterStreamer, FileChunkUploader, upload_data_chunks -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential from ._models import ContentSettings, NTFSAttributes @@ -196,11 +190,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) def __enter__(self) -> Self: self._client.__enter__() @@ -498,7 +496,7 @@ def create_file( @distributed_trace def upload_file( - self, data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], length: Optional[int] = None, file_attributes: Optional[Union[str, "NTFSAttributes"]] = None, file_creation_time: Optional[Union[str, datetime]] = None, @@ -511,9 +509,9 @@ def upload_file( :param data: Content of the file. 
- :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] + :type data: Union[bytes, str, Iterable[AnyStr], IO[bytes]] :param int length: - Length of the file in bytes. Specify its maximum size, up to 1 TiB. + Length of the file in bytes. :param file_attributes: The file system attributes for files and directories. If not set, the default value would be "None" and the attributes will be set to "Archive". diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi index ebb095b0a55a..3fc1fb454d86 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi @@ -138,7 +138,7 @@ class ShareFileClient(StorageAccountHostsMixin): @distributed_trace def upload_file( self, - data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]], + data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], length: Optional[int] = None, file_attributes: Optional[Union[str, NTFSAttributes]] = None, file_creation_time: Optional[Union[str, datetime]] = None, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py index 3807bbe8402e..cea1a9e2ac4c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_azure_file_storage.py @@ -31,6 +31,8 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword :vartype directory: azure.storage.fileshare.operations.DirectoryOperations :ivar file: FileOperations operations :vartype file: azure.storage.fileshare.operations.FileOperations + :param version: Specifies the version of the operation to use for this request. Required. 
+ :type version: str :param url: The URL of the service account, share, directory or file that is the target of the desired operation. Required. :type url: str @@ -44,9 +46,6 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source URI. Default value is None. :type allow_source_trailing_dot: bool - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes downloaded from the source url into the specified range. Default value is "update". Note that overriding this default value may result in unsupported behavior. @@ -55,6 +54,7 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword def __init__( # pylint: disable=missing-client-constructor-parameter-credential self, + version: str, url: str, base_url: str = "", file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -63,6 +63,7 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential **kwargs: Any ) -> None: self._config = AzureFileStorageConfiguration( + version=version, url=url, file_request_intent=file_request_intent, allow_trailing_dot=allow_trailing_dot, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py index 827328f9ffe3..51f09f3c22f7 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_configuration.py @@ -21,6 +21,8 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib Note that 
all parameters used to create this instance are saved as instance attributes. + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param url: The URL of the service account, share, directory or file that is the target of the desired operation. Required. :type url: str @@ -32,9 +34,6 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source URI. Default value is None. :type allow_source_trailing_dot: bool - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes downloaded from the source url into the specified range. Default value is "update". Note that overriding this default value may result in unsupported behavior. 
@@ -43,23 +42,25 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib def __init__( self, + version: str, url: str, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, allow_trailing_dot: Optional[bool] = None, allow_source_trailing_dot: Optional[bool] = None, **kwargs: Any ) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update") + if version is None: + raise ValueError("Parameter 'version' must not be None.") if url is None: raise ValueError("Parameter 'url' must not be None.") + self.version = version self.url = url self.file_request_intent = file_request_intent self.allow_trailing_dot = allow_trailing_dot self.allow_source_trailing_dot = allow_source_trailing_dot - self.version = version self.file_range_write_from_url = file_range_write_from_url kwargs.setdefault("sdk_moniker", "azurefilestorage/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_utils/serialization.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_utils/serialization.py index ff543ed937ff..6da830e0cf4a 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_utils/serialization.py @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py index dce7ddfcba82..4aac6aa9a1d4 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_azure_file_storage.py @@ -31,6 +31,8 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword :vartype directory: 
azure.storage.fileshare.aio.operations.DirectoryOperations :ivar file: FileOperations operations :vartype file: azure.storage.fileshare.aio.operations.FileOperations + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param url: The URL of the service account, share, directory or file that is the target of the desired operation. Required. :type url: str @@ -44,9 +46,6 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source URI. Default value is None. :type allow_source_trailing_dot: bool - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes downloaded from the source url into the specified range. Default value is "update". Note that overriding this default value may result in unsupported behavior. 
@@ -55,6 +54,7 @@ class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword def __init__( # pylint: disable=missing-client-constructor-parameter-credential self, + version: str, url: str, base_url: str = "", file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -63,6 +63,7 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential **kwargs: Any ) -> None: self._config = AzureFileStorageConfiguration( + version=version, url=url, file_request_intent=file_request_intent, allow_trailing_dot=allow_trailing_dot, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py index e61a4ca5cf27..bc7aa571cdd4 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/_configuration.py @@ -21,6 +21,8 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib Note that all parameters used to create this instance are saved as instance attributes. + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param url: The URL of the service account, share, directory or file that is the target of the desired operation. Required. :type url: str @@ -32,9 +34,6 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source URI. Default value is None. :type allow_source_trailing_dot: bool - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. 
- :paramtype version: str :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes downloaded from the source url into the specified range. Default value is "update". Note that overriding this default value may result in unsupported behavior. @@ -43,23 +42,25 @@ class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attrib def __init__( self, + version: str, url: str, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, allow_trailing_dot: Optional[bool] = None, allow_source_trailing_dot: Optional[bool] = None, **kwargs: Any ) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update") + if version is None: + raise ValueError("Parameter 'version' must not be None.") if url is None: raise ValueError("Parameter 'url' must not be None.") + self.version = version self.url = url self.file_request_intent = file_request_intent self.allow_trailing_dot = allow_trailing_dot self.allow_source_trailing_dot = allow_source_trailing_dot - self.version = version self.file_range_write_from_url = file_range_write_from_url kwargs.setdefault("sdk_moniker", "azurefilestorage/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py index cba4685c7443..b83ffd8a6223 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py @@ -62,7 +62,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else 
kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, @@ -152,6 +152,7 @@ async def create( _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, file_permission=file_permission, @@ -168,7 +169,6 @@ async def create( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -259,12 +259,12 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -350,11 +350,11 @@ async def delete(self, timeout: Optional[int] = None, **kwargs: Any) -> None: _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -384,7 +384,7 @@ async def delete(self, timeout: Optional[int] = None, **kwargs: Any) -> None: return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def set_properties( + async def set_properties( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, file_permission: str = "inherit", @@ -465,6 +465,7 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_permission=file_permission, file_permission_format=file_permission_format, @@ 
-480,7 +481,6 @@ async def set_properties( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -569,13 +569,13 @@ async def set_metadata( _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -669,6 +669,7 @@ async def list_files_and_directories_segment( _request = build_list_files_and_directories_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, sharesnapshot=sharesnapshot, marker=marker, @@ -680,7 +681,6 @@ async def list_files_and_directories_segment( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -766,6 +766,7 @@ async def list_handles( _request = build_list_handles_request( url=self._config.url, + version=self._config.version, marker=marker, maxresults=maxresults, timeout=timeout, @@ -774,7 +775,6 @@ async def list_handles( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -860,6 +860,7 @@ async def force_close_handles( _request = build_force_close_handles_request( url=self._config.url, handle_id=handle_id, + version=self._config.version, timeout=timeout, marker=marker, sharesnapshot=sharesnapshot, @@ -867,7 +868,6 @@ async def force_close_handles( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -904,7 +904,7 @@ async def force_close_handles( return cls(pipeline_response, 
None, response_headers) # type: ignore @distributed_trace_async - async def rename( + async def rename( # pylint: disable=too-many-locals self, rename_source: str, timeout: Optional[int] = None, @@ -1007,6 +1007,7 @@ async def rename( _request = build_rename_request( url=self._config.url, rename_source=rename_source, + version=self._config.version, timeout=timeout, replace_if_exists=replace_if_exists, ignore_read_only=ignore_read_only, @@ -1025,7 +1026,6 @@ async def rename( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py index e0296f2ea2f5..76311af25265 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py @@ -76,7 +76,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, file_content_length: int, timeout: Optional[int] = None, @@ -216,6 +216,7 @@ async def create( _request = build_create_request( url=self._config.url, file_content_length=file_content_length, + version=self._config.version, timeout=timeout, file_content_type=_file_content_type, file_content_encoding=_file_content_encoding, @@ -243,7 +244,6 @@ async def create( file_request_intent=self._config.file_request_intent, file_type_constant=file_type_constant, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -352,6 +352,7 @@ async def download( 
_request = build_download_request( url=self._config.url, + version=self._config.version, timeout=timeout, range=range, range_get_content_md5=range_get_content_md5, @@ -359,7 +360,6 @@ async def download( lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -497,12 +497,12 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -619,11 +619,11 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -654,7 +654,7 @@ async def delete( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def set_http_headers( + async def set_http_headers( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, file_content_length: Optional[int] = None, @@ -762,6 +762,7 @@ async def set_http_headers( _request = build_set_http_headers_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_content_length=file_content_length, file_content_type=_file_content_type, @@ -784,7 +785,6 @@ async def set_http_headers( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -883,13 +883,13 @@ async def set_metadata( _request = build_set_metadata_request( 
url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -972,6 +972,7 @@ async def acquire_lease( _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -980,7 +981,6 @@ async def acquire_lease( file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1055,13 +1055,13 @@ async def release_lease( _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1144,6 +1144,7 @@ async def change_lease( _request = build_change_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, proposed_lease_id=proposed_lease_id, request_id_parameter=request_id_parameter, @@ -1151,7 +1152,6 @@ async def change_lease( file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1233,6 +1233,7 @@ async def break_lease( _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, @@ -1240,7 +1241,6 @@ async def break_lease( file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1276,7 +1276,7 
@@ async def break_lease( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def upload_range( + async def upload_range( # pylint: disable=too-many-locals self, range: str, content_length: int, @@ -1364,6 +1364,7 @@ async def upload_range( url=self._config.url, range=range, content_length=content_length, + version=self._config.version, timeout=timeout, file_range_write=file_range_write, content_md5=content_md5, @@ -1375,7 +1376,6 @@ async def upload_range( file_request_intent=self._config.file_request_intent, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1501,6 +1501,7 @@ async def upload_range_from_url( range=range, copy_source=copy_source, content_length=content_length, + version=self._config.version, timeout=timeout, source_range=source_range, source_content_crc64=source_content_crc64, @@ -1514,7 +1515,6 @@ async def upload_range_from_url( file_request_intent=self._config.file_request_intent, comp=comp, file_range_write_from_url=self._config.file_range_write_from_url, - version=self._config.version, headers=_headers, params=_params, ) @@ -1614,6 +1614,7 @@ async def get_range_list( _request = build_get_range_list_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, prevsharesnapshot=prevsharesnapshot, timeout=timeout, @@ -1623,7 +1624,6 @@ async def get_range_list( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1660,7 +1660,7 @@ async def get_range_list( return deserialized # type: ignore @distributed_trace_async - async def start_copy( + async def start_copy( # pylint: disable=too-many-locals self, copy_source: str, timeout: Optional[int] = None, @@ -1775,6 +1775,7 @@ async def start_copy( _request = build_start_copy_request( url=self._config.url, 
copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, file_permission=file_permission, @@ -1796,7 +1797,6 @@ async def start_copy( allow_trailing_dot=self._config.allow_trailing_dot, allow_source_trailing_dot=self._config.allow_source_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -1878,13 +1878,13 @@ async def abort_copy( _request = build_abort_copy_request( url=self._config.url, copy_id=copy_id, + version=self._config.version, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, headers=_headers, params=_params, ) @@ -1961,6 +1961,7 @@ async def list_handles( _request = build_list_handles_request( url=self._config.url, + version=self._config.version, marker=marker, maxresults=maxresults, timeout=timeout, @@ -1968,7 +1969,6 @@ async def list_handles( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2050,13 +2050,13 @@ async def force_close_handles( _request = build_force_close_handles_request( url=self._config.url, handle_id=handle_id, + version=self._config.version, timeout=timeout, marker=marker, sharesnapshot=sharesnapshot, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2093,7 +2093,7 @@ async def force_close_handles( return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def rename( + async def rename( # pylint: disable=too-many-locals self, rename_source: str, timeout: Optional[int] = None, @@ -2201,6 +2201,7 @@ async def 
rename( _request = build_rename_request( url=self._config.url, rename_source=rename_source, + version=self._config.version, timeout=timeout, replace_if_exists=replace_if_exists, ignore_read_only=ignore_read_only, @@ -2219,7 +2220,6 @@ async def rename( allow_source_trailing_dot=self._config.allow_source_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2341,6 +2341,7 @@ async def create_symbolic_link( _request = build_create_symbolic_link_request( url=self._config.url, link_text=link_text, + version=self._config.version, timeout=timeout, metadata=metadata, file_creation_time=file_creation_time, @@ -2351,7 +2352,6 @@ async def create_symbolic_link( group=group, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -2442,12 +2442,12 @@ async def get_symbolic_link( _request = build_get_symbolic_link_request( url=self._config.url, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -2534,13 +2534,13 @@ async def create_hard_link( _request = build_create_hard_link_request( url=self._config.url, target_file=target_file, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, file_type_constant=file_type_constant, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py index 212bf8430546..4d7dafcf5895 100644 --- 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py @@ -94,12 +94,12 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -159,11 +159,11 @@ async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -248,6 +248,7 @@ async def list_shares_segment( _request = build_list_shares_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -255,7 +256,6 @@ async def list_shares_segment( timeout=timeout, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -333,12 +333,12 @@ async def get_user_delegation_key( _request = build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py index 491579cde044..39daa0cfeb65 100644 --- 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py @@ -71,7 +71,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( + async def create( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, @@ -157,6 +157,7 @@ async def create( _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, quota=quota, @@ -172,7 +173,6 @@ async def create( enable_smb_directory_lease=enable_smb_directory_lease, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -261,12 +261,12 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -404,13 +404,13 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, delete_snapshots=delete_snapshots, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -500,6 +500,7 @@ async def acquire_lease( _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -509,7 +510,6 @@ async def acquire_lease( comp=comp, action=action, restype=restype, - 
version=self._config.version, headers=_headers, params=_params, ) @@ -593,6 +593,7 @@ async def release_lease( _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, @@ -600,7 +601,6 @@ async def release_lease( comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -688,6 +688,7 @@ async def change_lease( _request = build_change_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, proposed_lease_id=proposed_lease_id, sharesnapshot=sharesnapshot, @@ -696,7 +697,6 @@ async def change_lease( comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -780,6 +780,7 @@ async def renew_lease( _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, @@ -787,7 +788,6 @@ async def renew_lease( comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -883,6 +883,7 @@ async def break_lease( _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, lease_id=_lease_id, @@ -892,7 +893,6 @@ async def break_lease( comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -963,12 +963,12 @@ async def create_snapshot( _request = build_create_snapshot_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1096,12 +1096,12 @@ async def create_permission( 
_request = build_create_permission_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, json=_json, content=_content, headers=_headers, @@ -1181,12 +1181,12 @@ async def get_permission( _request = build_get_permission_request( url=self._config.url, file_permission_key=file_permission_key, + version=self._config.version, file_permission_format=file_permission_format, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1220,7 +1220,7 @@ async def get_permission( return deserialized # type: ignore @distributed_trace_async - async def set_properties( + async def set_properties( # pylint: disable=too-many-locals self, timeout: Optional[int] = None, quota: Optional[int] = None, @@ -1306,6 +1306,7 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, quota=quota, access_tier=access_tier, @@ -1321,7 +1322,6 @@ async def set_properties( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1419,13 +1419,13 @@ async def set_metadata( _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1497,12 +1497,12 @@ async def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - 
version=self._config.version, headers=_headers, params=_params, ) @@ -1590,13 +1590,13 @@ async def set_access_policy( _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1669,12 +1669,12 @@ async def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1756,6 +1756,7 @@ async def restore( _request = build_restore_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, deleted_share_name=deleted_share_name, @@ -1763,7 +1764,6 @@ async def restore( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py index 550ebe4ff758..34c21387336a 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py @@ -228,3 +228,10 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" SHARE_SNAPSHOT_NOT_FOUND = "ShareSnapshotNotFound" + 
FILE_SHARE_PROVISIONED_IOPS_INVALID = "FileShareProvisionedIopsInvalid" + FILE_SHARE_PROVISIONED_BANDWIDTH_INVALID = "FileShareProvisionedBandwidthInvalid" + FILE_SHARE_PROVISIONED_STORAGE_INVALID = "FileShareProvisionedStorageInvalid" + TOTAL_SHARES_PROVISIONED_CAPACITY_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedCapacityExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_IOPS_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedIopsExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_BANDWIDTH_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedBandwidthExceedsAccountLimit" + TOTAL_SHARES_COUNT_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesCountExceedsAccountLimit" diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py index f7daeec6e89d..14572ebc405a 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/models/_models_py3.py @@ -741,6 +741,8 @@ class KeyInfo(_serialization.Model): :vartype start: str :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. :vartype expiry: str + :ivar delegated_user_tid: The delegated user tenant id in Azure AD. + :vartype delegated_user_tid: str """ _validation = { @@ -750,18 +752,24 @@ class KeyInfo(_serialization.Model): _attribute_map = { "start": {"key": "Start", "type": "str"}, "expiry": {"key": "Expiry", "type": "str"}, + "delegated_user_tid": {"key": "DelegatedUserTid", "type": "str"}, } - def __init__(self, *, expiry: str, start: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, *, expiry: str, start: Optional[str] = None, delegated_user_tid: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword start: The date-time the key is active in ISO 8601 UTC time. 
:paramtype start: str :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. :paramtype expiry: str + :keyword delegated_user_tid: The delegated user tenant id in Azure AD. + :paramtype delegated_user_tid: str """ super().__init__(**kwargs) self.start = start self.expiry = expiry + self.delegated_user_tid = delegated_user_tid class LeaseAccessConditions(_serialization.Model): @@ -1883,6 +1891,9 @@ class UserDelegationKey(_serialization.Model): :vartype signed_service: str :ivar signed_version: The service version that created the key. Required. :vartype signed_version: str + :ivar signed_delegated_user_tid: The delegated user tenant id in Azure AD. Return if + DelegatedUserTid is specified. + :vartype signed_delegated_user_tid: str :ivar value: The key as a base64 string. Required. :vartype value: str """ @@ -1904,6 +1915,7 @@ class UserDelegationKey(_serialization.Model): "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, "signed_service": {"key": "SignedService", "type": "str"}, "signed_version": {"key": "SignedVersion", "type": "str"}, + "signed_delegated_user_tid": {"key": "SignedDelegatedUserTid", "type": "str"}, "value": {"key": "Value", "type": "str"}, } @@ -1917,6 +1929,7 @@ def __init__( signed_service: str, signed_version: str, value: str, + signed_delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1933,6 +1946,9 @@ def __init__( :paramtype signed_service: str :keyword signed_version: The service version that created the key. Required. :paramtype signed_version: str + :keyword signed_delegated_user_tid: The delegated user tenant id in Azure AD. Return if + DelegatedUserTid is specified. + :paramtype signed_delegated_user_tid: str :keyword value: The key as a base64 string. Required. 
:paramtype value: str """ @@ -1943,4 +1959,5 @@ def __init__( self.signed_expiry = signed_expiry self.signed_service = signed_service self.signed_version = signed_version + self.signed_delegated_user_tid = signed_delegated_user_tid self.value = value diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py index 521a291a889b..157426b206fe 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py @@ -37,6 +37,7 @@ def build_create_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, file_permission: str = "inherit", @@ -58,7 +59,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -116,6 +116,7 @@ def build_create_request( def build_get_properties_request( url: str, *, + version: str, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, allow_trailing_dot: Optional[bool] = None, @@ -126,7 +127,6 @@ def build_get_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -158,6 +158,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: 
str, timeout: Optional[int] = None, allow_trailing_dot: Optional[bool] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -167,7 +168,6 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -197,6 +197,7 @@ def build_delete_request( def build_set_properties_request( url: str, *, + version: str, timeout: Optional[int] = None, file_permission: str = "inherit", file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None, @@ -217,7 +218,6 @@ def build_set_properties_request( restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -270,6 +270,7 @@ def build_set_properties_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, allow_trailing_dot: Optional[bool] = None, @@ -281,7 +282,6 @@ def build_set_metadata_request( restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -314,6 +314,7 @@ def build_set_metadata_request( def build_list_files_and_directories_segment_request( # pylint: disable=name-too-long url: str, *, + version: str, prefix: Optional[str] = None, 
sharesnapshot: Optional[str] = None, marker: Optional[str] = None, @@ -330,7 +331,6 @@ def build_list_files_and_directories_segment_request( # pylint: disable=name-to restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -373,6 +373,7 @@ def build_list_files_and_directories_segment_request( # pylint: disable=name-to def build_list_handles_request( url: str, *, + version: str, marker: Optional[str] = None, maxresults: Optional[int] = None, timeout: Optional[int] = None, @@ -386,7 +387,6 @@ def build_list_handles_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -425,6 +425,7 @@ def build_force_close_handles_request( url: str, *, handle_id: str, + version: str, timeout: Optional[int] = None, marker: Optional[str] = None, sharesnapshot: Optional[str] = None, @@ -437,7 +438,6 @@ def build_force_close_handles_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -471,10 +471,11 @@ def build_force_close_handles_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_rename_request( +def build_rename_request( # pylint: disable=too-many-locals url: str, *, rename_source: str, + version: str, 
timeout: Optional[int] = None, replace_if_exists: Optional[bool] = None, ignore_read_only: Optional[bool] = None, @@ -498,7 +499,6 @@ def build_rename_request( restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory")) comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -579,7 +579,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, @@ -669,6 +669,7 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, file_permission=file_permission, @@ -685,7 +686,6 @@ def create( # pylint: disable=inconsistent-return-statements allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -776,12 +776,12 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -869,11 +869,11 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + 
version=self._config.version, timeout=timeout, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -903,7 +903,7 @@ def delete( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def set_properties( # pylint: disable=inconsistent-return-statements + def set_properties( # pylint: disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, file_permission: str = "inherit", @@ -984,6 +984,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_permission=file_permission, file_permission_format=file_permission_format, @@ -999,7 +1000,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1088,13 +1088,13 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1188,6 +1188,7 @@ def list_files_and_directories_segment( _request = build_list_files_and_directories_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, sharesnapshot=sharesnapshot, marker=marker, @@ -1199,7 +1200,6 @@ def list_files_and_directories_segment( file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, 
headers=_headers, params=_params, ) @@ -1285,6 +1285,7 @@ def list_handles( _request = build_list_handles_request( url=self._config.url, + version=self._config.version, marker=marker, maxresults=maxresults, timeout=timeout, @@ -1293,7 +1294,6 @@ def list_handles( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1379,6 +1379,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements _request = build_force_close_handles_request( url=self._config.url, handle_id=handle_id, + version=self._config.version, timeout=timeout, marker=marker, sharesnapshot=sharesnapshot, @@ -1386,7 +1387,6 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1423,7 +1423,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def rename( # pylint: disable=inconsistent-return-statements + def rename( # pylint: disable=inconsistent-return-statements,too-many-locals self, rename_source: str, timeout: Optional[int] = None, @@ -1526,6 +1526,7 @@ def rename( # pylint: disable=inconsistent-return-statements _request = build_rename_request( url=self._config.url, rename_source=rename_source, + version=self._config.version, timeout=timeout, replace_if_exists=replace_if_exists, ignore_read_only=ignore_read_only, @@ -1544,7 +1545,6 @@ def rename( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py index f841c4296f3b..176dfe9bda33 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py @@ -36,10 +36,11 @@ _SERIALIZER.client_side_validation = False -def build_create_request( +def build_create_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, file_content_length: int, + version: str, timeout: Optional[int] = None, file_content_type: Optional[str] = None, file_content_encoding: Optional[str] = None, @@ -73,7 +74,6 @@ def build_create_request( file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -156,6 +156,7 @@ def build_create_request( def build_download_request( url: str, *, + version: str, timeout: Optional[int] = None, range: Optional[str] = None, range_get_content_md5: Optional[bool] = None, @@ -168,7 +169,6 @@ def build_download_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -207,6 +207,7 @@ def build_download_request( def build_get_properties_request( url: str, *, + version: str, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, lease_id: Optional[str] = 
None, @@ -217,7 +218,6 @@ def build_get_properties_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -250,6 +250,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, allow_trailing_dot: Optional[bool] = None, @@ -259,7 +260,6 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -287,9 +287,10 @@ def build_delete_request( return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_set_http_headers_request( +def build_set_http_headers_request( # pylint: disable=too-many-locals url: str, *, + version: str, timeout: Optional[int] = None, file_content_length: Optional[int] = None, file_content_type: Optional[str] = None, @@ -317,7 +318,6 @@ def build_set_http_headers_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -387,6 +387,7 @@ def build_set_http_headers_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, lease_id: Optional[str] = None, @@ -398,7 +399,6 @@ def build_set_metadata_request( _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -432,6 +432,7 @@ def build_set_metadata_request( def build_acquire_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, @@ -445,7 +446,6 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -483,6 +483,7 @@ def build_release_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, allow_trailing_dot: Optional[bool] = None, @@ -494,7 +495,6 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -529,6 +529,7 @@ def build_change_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, proposed_lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -541,7 +542,6 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2026-02-06"] = 
kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -577,6 +577,7 @@ def build_change_lease_request( def build_break_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -589,7 +590,6 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -626,6 +626,7 @@ def build_upload_range_request( *, range: str, content_length: int, + version: str, timeout: Optional[int] = None, file_range_write: Union[str, _models.FileRangeWriteType] = "update", content_md5: Optional[bytes] = None, @@ -643,7 +644,6 @@ def build_upload_range_request( comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -695,6 +695,7 @@ def build_upload_range_from_url_request( range: str, copy_source: str, content_length: int, + version: str, timeout: Optional[int] = None, source_range: Optional[str] = None, source_content_crc64: Optional[bytes] = None, @@ -715,7 +716,6 @@ def build_upload_range_from_url_request( file_range_write_from_url: Literal["update"] = kwargs.pop( "file_range_write_from_url", _headers.pop("x-ms-write", "update") ) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -777,6 +777,7 @@ def 
build_upload_range_from_url_request( def build_get_range_list_request( url: str, *, + version: str, sharesnapshot: Optional[str] = None, prevsharesnapshot: Optional[str] = None, timeout: Optional[int] = None, @@ -791,7 +792,6 @@ def build_get_range_list_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["rangelist"] = kwargs.pop("comp", _params.pop("comp", "rangelist")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -828,10 +828,11 @@ def build_get_range_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_start_copy_request( +def build_start_copy_request( # pylint: disable=too-many-locals url: str, *, copy_source: str, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, file_permission: str = "inherit", @@ -858,7 +859,6 @@ def build_start_copy_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -933,6 +933,7 @@ def build_abort_copy_request( url: str, *, copy_id: str, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, allow_trailing_dot: Optional[bool] = None, @@ -946,7 +947,6 @@ def build_abort_copy_request( copy_action_abort_constant: Literal["abort"] = kwargs.pop( "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") ) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -980,6 +980,7 @@ def build_abort_copy_request( def build_list_handles_request( url: str, *, + version: str, marker: 
Optional[str] = None, maxresults: Optional[int] = None, timeout: Optional[int] = None, @@ -992,7 +993,6 @@ def build_list_handles_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1029,6 +1029,7 @@ def build_force_close_handles_request( url: str, *, handle_id: str, + version: str, timeout: Optional[int] = None, marker: Optional[str] = None, sharesnapshot: Optional[str] = None, @@ -1040,7 +1041,6 @@ def build_force_close_handles_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1072,10 +1072,11 @@ def build_force_close_handles_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_rename_request( +def build_rename_request( # pylint: disable=too-many-locals url: str, *, rename_source: str, + version: str, timeout: Optional[int] = None, replace_if_exists: Optional[bool] = None, ignore_read_only: Optional[bool] = None, @@ -1099,7 +1100,6 @@ def build_rename_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1165,6 +1165,7 @@ def build_create_symbolic_link_request( url: str, *, link_text: str, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, 
file_creation_time: str = "now", @@ -1180,7 +1181,6 @@ def build_create_symbolic_link_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1223,6 +1223,7 @@ def build_create_symbolic_link_request( def build_get_symbolic_link_request( url: str, *, + version: str, timeout: Optional[int] = None, sharesnapshot: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -1233,7 +1234,6 @@ def build_get_symbolic_link_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1266,6 +1266,7 @@ def build_create_hard_link_request( url: str, *, target_file: str, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, lease_id: Optional[str] = None, @@ -1277,7 +1278,6 @@ def build_create_hard_link_request( restype: Literal["hardlink"] = kwargs.pop("restype", _params.pop("restype", "hardlink")) file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1328,7 +1328,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: 
disable=inconsistent-return-statements,too-many-locals self, file_content_length: int, timeout: Optional[int] = None, @@ -1468,6 +1468,7 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, file_content_length=file_content_length, + version=self._config.version, timeout=timeout, file_content_type=_file_content_type, file_content_encoding=_file_content_encoding, @@ -1495,7 +1496,6 @@ def create( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, file_type_constant=file_type_constant, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1604,6 +1604,7 @@ def download( _request = build_download_request( url=self._config.url, + version=self._config.version, timeout=timeout, range=range, range_get_content_md5=range_get_content_md5, @@ -1611,7 +1612,6 @@ def download( lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -1749,12 +1749,12 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -1871,11 +1871,11 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -1906,7 +1906,7 @@ def delete( # 
pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def set_http_headers( # pylint: disable=inconsistent-return-statements + def set_http_headers( # pylint: disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, file_content_length: Optional[int] = None, @@ -2014,6 +2014,7 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements _request = build_set_http_headers_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_content_length=file_content_length, file_content_type=_file_content_type, @@ -2036,7 +2037,6 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2135,13 +2135,13 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2224,6 +2224,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -2232,7 +2233,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2307,13 +2307,13 @@ def release_lease( # pylint: disable=inconsistent-return-statements _request = build_release_lease_request( 
url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2396,6 +2396,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements _request = build_change_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, proposed_lease_id=proposed_lease_id, request_id_parameter=request_id_parameter, @@ -2403,7 +2404,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2485,6 +2485,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, request_id_parameter=request_id_parameter, @@ -2492,7 +2493,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, comp=comp, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2528,7 +2528,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def upload_range( # pylint: disable=inconsistent-return-statements + def upload_range( # pylint: disable=inconsistent-return-statements,too-many-locals self, range: str, content_length: int, @@ -2616,6 +2616,7 @@ def upload_range( # pylint: disable=inconsistent-return-statements url=self._config.url, range=range, content_length=content_length, + version=self._config.version, timeout=timeout, file_range_write=file_range_write, content_md5=content_md5, @@ 
-2627,7 +2628,6 @@ def upload_range( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -2753,6 +2753,7 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements range=range, copy_source=copy_source, content_length=content_length, + version=self._config.version, timeout=timeout, source_range=source_range, source_content_crc64=source_content_crc64, @@ -2766,7 +2767,6 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, comp=comp, file_range_write_from_url=self._config.file_range_write_from_url, - version=self._config.version, headers=_headers, params=_params, ) @@ -2866,6 +2866,7 @@ def get_range_list( _request = build_get_range_list_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, prevsharesnapshot=prevsharesnapshot, timeout=timeout, @@ -2875,7 +2876,6 @@ def get_range_list( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2912,7 +2912,7 @@ def get_range_list( return deserialized # type: ignore @distributed_trace - def start_copy( # pylint: disable=inconsistent-return-statements + def start_copy( # pylint: disable=inconsistent-return-statements,too-many-locals self, copy_source: str, timeout: Optional[int] = None, @@ -3027,6 +3027,7 @@ def start_copy( # pylint: disable=inconsistent-return-statements _request = build_start_copy_request( url=self._config.url, copy_source=copy_source, + version=self._config.version, timeout=timeout, metadata=metadata, file_permission=file_permission, @@ -3048,7 +3049,6 @@ def start_copy( # pylint: disable=inconsistent-return-statements 
allow_trailing_dot=self._config.allow_trailing_dot, allow_source_trailing_dot=self._config.allow_source_trailing_dot, file_request_intent=self._config.file_request_intent, - version=self._config.version, headers=_headers, params=_params, ) @@ -3130,13 +3130,13 @@ def abort_copy( # pylint: disable=inconsistent-return-statements _request = build_abort_copy_request( url=self._config.url, copy_id=copy_id, + version=self._config.version, timeout=timeout, lease_id=_lease_id, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, copy_action_abort_constant=copy_action_abort_constant, - version=self._config.version, headers=_headers, params=_params, ) @@ -3213,6 +3213,7 @@ def list_handles( _request = build_list_handles_request( url=self._config.url, + version=self._config.version, marker=marker, maxresults=maxresults, timeout=timeout, @@ -3220,7 +3221,6 @@ def list_handles( allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -3302,13 +3302,13 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements _request = build_force_close_handles_request( url=self._config.url, handle_id=handle_id, + version=self._config.version, timeout=timeout, marker=marker, sharesnapshot=sharesnapshot, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -3345,7 +3345,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace - def rename( # pylint: disable=inconsistent-return-statements + def rename( # pylint: disable=inconsistent-return-statements,too-many-locals self, rename_source: str, timeout: Optional[int] = None, @@ -3453,6 +3453,7 @@ 
def rename( # pylint: disable=inconsistent-return-statements _request = build_rename_request( url=self._config.url, rename_source=rename_source, + version=self._config.version, timeout=timeout, replace_if_exists=replace_if_exists, ignore_read_only=ignore_read_only, @@ -3471,7 +3472,6 @@ def rename( # pylint: disable=inconsistent-return-statements allow_source_trailing_dot=self._config.allow_source_trailing_dot, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -3593,6 +3593,7 @@ def create_symbolic_link( # pylint: disable=inconsistent-return-statements _request = build_create_symbolic_link_request( url=self._config.url, link_text=link_text, + version=self._config.version, timeout=timeout, metadata=metadata, file_creation_time=file_creation_time, @@ -3603,7 +3604,6 @@ def create_symbolic_link( # pylint: disable=inconsistent-return-statements group=group, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -3694,12 +3694,12 @@ def get_symbolic_link( # pylint: disable=inconsistent-return-statements _request = build_get_symbolic_link_request( url=self._config.url, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -3786,13 +3786,13 @@ def create_hard_link( # pylint: disable=inconsistent-return-statements _request = build_create_hard_link_request( url=self._config.url, target_file=target_file, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, file_type_constant=file_type_constant, - version=self._config.version, headers=_headers, params=_params, ) diff --git 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py index 710745f74d5a..11db434df4f1 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py @@ -38,6 +38,7 @@ def build_set_properties_request( url: str, *, content: Any, + version: str, timeout: Optional[int] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, **kwargs: Any @@ -48,7 +49,6 @@ def build_set_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -79,6 +79,7 @@ def build_set_properties_request( def build_get_properties_request( url: str, *, + version: str, timeout: Optional[int] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, **kwargs: Any @@ -88,7 +89,6 @@ def build_get_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -117,6 +117,7 @@ def build_get_properties_request( def build_list_shares_segment_request( url: str, *, + version: str, prefix: Optional[str] = None, marker: Optional[str] = None, 
maxresults: Optional[int] = None, @@ -129,7 +130,6 @@ def build_list_shares_segment_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -163,7 +163,13 @@ def build_list_shares_segment_request( def build_get_user_delegation_key_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, + *, + content: Any, + version: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -171,7 +177,6 @@ def build_get_user_delegation_key_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -256,12 +261,12 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -321,11 +326,11 @@ def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _model _request = build_get_properties_request( url=self._config.url, + 
version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -410,6 +415,7 @@ def list_shares_segment( _request = build_list_shares_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -417,7 +423,6 @@ def list_shares_segment( timeout=timeout, file_request_intent=self._config.file_request_intent, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -495,12 +500,12 @@ def get_user_delegation_key( _request = build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py index bb040e11d5b5..464f7e4f6614 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py @@ -38,6 +38,7 @@ def build_create_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, quota: Optional[int] = None, @@ -58,7 +59,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -124,6 +124,7 @@ def 
build_create_request( def build_get_properties_request( url: str, *, + version: str, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, lease_id: Optional[str] = None, @@ -134,7 +135,6 @@ def build_get_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -166,6 +166,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None, @@ -177,7 +178,6 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -211,6 +211,7 @@ def build_delete_request( def build_acquire_lease_request( url: str, *, + version: str, timeout: Optional[int] = None, duration: Optional[int] = None, proposed_lease_id: Optional[str] = None, @@ -225,7 +226,6 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -264,6 +264,7 @@ def build_release_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, 
sharesnapshot: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -276,7 +277,6 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -312,6 +312,7 @@ def build_change_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, proposed_lease_id: Optional[str] = None, sharesnapshot: Optional[str] = None, @@ -325,7 +326,6 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -363,6 +363,7 @@ def build_renew_lease_request( url: str, *, lease_id: str, + version: str, timeout: Optional[int] = None, sharesnapshot: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -375,7 +376,6 @@ def build_renew_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -410,6 +410,7 @@ def build_renew_lease_request( def build_break_lease_request( url: str, 
*, + version: str, timeout: Optional[int] = None, break_period: Optional[int] = None, lease_id: Optional[str] = None, @@ -424,7 +425,6 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -462,6 +462,7 @@ def build_break_lease_request( def build_create_snapshot_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -472,7 +473,6 @@ def build_create_snapshot_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -503,6 +503,7 @@ def build_create_snapshot_request( def build_create_permission_request( url: str, *, + version: str, timeout: Optional[int] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, **kwargs: Any @@ -513,7 +514,6 @@ def build_create_permission_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -545,6 +545,7 @@ def 
build_get_permission_request( url: str, *, file_permission_key: str, + version: str, file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None, timeout: Optional[int] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -555,7 +556,6 @@ def build_get_permission_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -589,6 +589,7 @@ def build_get_permission_request( def build_set_properties_request( url: str, *, + version: str, timeout: Optional[int] = None, quota: Optional[int] = None, access_tier: Optional[Union[str, _models.ShareAccessTier]] = None, @@ -609,7 +610,6 @@ def build_set_properties_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -674,6 +674,7 @@ def build_set_properties_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, lease_id: Optional[str] = None, @@ -685,7 +686,6 @@ def build_set_metadata_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -718,6 +718,7 @@ def build_set_metadata_request( def build_get_access_policy_request( 
url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -728,7 +729,6 @@ def build_get_access_policy_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -759,6 +759,7 @@ def build_get_access_policy_request( def build_set_access_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, content: Any = None, @@ -771,7 +772,6 @@ def build_set_access_policy_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -804,6 +804,7 @@ def build_set_access_policy_request( def build_get_statistics_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -814,7 +815,6 @@ def build_get_statistics_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -845,6 +845,7 @@ def build_get_statistics_request( def build_restore_request( url: str, *, + version: str, timeout: Optional[int] = 
None, request_id_parameter: Optional[str] = None, deleted_share_name: Optional[str] = None, @@ -857,7 +858,6 @@ def build_restore_request( restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share")) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -911,7 +911,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def create( # pylint: disable=inconsistent-return-statements + def create( # pylint: disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, @@ -997,6 +997,7 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, quota=quota, @@ -1012,7 +1013,6 @@ def create( # pylint: disable=inconsistent-return-statements enable_smb_directory_lease=enable_smb_directory_lease, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1101,12 +1101,12 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1244,13 +1244,13 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, sharesnapshot=sharesnapshot, timeout=timeout, 
delete_snapshots=delete_snapshots, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1340,6 +1340,7 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements _request = build_acquire_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, duration=duration, proposed_lease_id=proposed_lease_id, @@ -1349,7 +1350,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1433,6 +1433,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements _request = build_release_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, @@ -1440,7 +1441,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1528,6 +1528,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements _request = build_change_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, proposed_lease_id=proposed_lease_id, sharesnapshot=sharesnapshot, @@ -1536,7 +1537,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1620,6 +1620,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements _request = build_renew_lease_request( url=self._config.url, lease_id=lease_id, + version=self._config.version, timeout=timeout, sharesnapshot=sharesnapshot, request_id_parameter=request_id_parameter, @@ -1627,7 +1628,6 @@ def renew_lease( # pylint: 
disable=inconsistent-return-statements comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1723,6 +1723,7 @@ def break_lease( # pylint: disable=inconsistent-return-statements _request = build_break_lease_request( url=self._config.url, + version=self._config.version, timeout=timeout, break_period=break_period, lease_id=_lease_id, @@ -1732,7 +1733,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements comp=comp, action=action, restype=restype, - version=self._config.version, headers=_headers, params=_params, ) @@ -1803,12 +1803,12 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements _request = build_create_snapshot_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -1936,12 +1936,12 @@ def create_permission( # pylint: disable=inconsistent-return-statements _request = build_create_permission_request( url=self._config.url, + version=self._config.version, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, json=_json, content=_content, headers=_headers, @@ -2021,12 +2021,12 @@ def get_permission( _request = build_get_permission_request( url=self._config.url, file_permission_key=file_permission_key, + version=self._config.version, file_permission_format=file_permission_format, timeout=timeout, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2060,7 +2060,7 @@ def get_permission( return deserialized # type: ignore @distributed_trace - def set_properties( # pylint: disable=inconsistent-return-statements + def set_properties( # pylint: 
disable=inconsistent-return-statements,too-many-locals self, timeout: Optional[int] = None, quota: Optional[int] = None, @@ -2146,6 +2146,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, quota=quota, access_tier=access_tier, @@ -2161,7 +2162,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2259,13 +2259,13 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2337,12 +2337,12 @@ def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2430,13 +2430,13 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -2509,12 +2509,12 @@ def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, file_request_intent=self._config.file_request_intent, restype=restype, 
comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2596,6 +2596,7 @@ def restore( # pylint: disable=inconsistent-return-statements _request = build_restore_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, deleted_share_name=deleted_share_name, @@ -2603,7 +2604,6 @@ def restore( # pylint: disable=inconsistent-return-statements file_request_intent=self._config.file_request_intent, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py index 84d30e55bec4..bd157885fc8d 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py @@ -51,6 +51,7 @@ '2025-07-05', '2025-11-05', '2026-02-06', + '2026-04-06', ] diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py index 9f4626c8421d..1330b66fcf9e 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py @@ -126,11 +126,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + 
version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) def __enter__(self) -> Self: self._client.__enter__() diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py index b8f861d1556b..8c35e2ee3d3b 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.py @@ -124,11 +124,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) def __enter__(self) -> Self: self._client.__enter__() @@ -203,6 +207,7 @@ def get_user_delegation_key( *, expiry: "datetime", start: Optional["datetime"] = None, + delegated_user_tid: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> "UserDelegationKey": @@ -217,6 +222,7 @@ def get_user_delegation_key( :keyword start: A DateTime value. Indicates when the key becomes valid. 
:paramtype start: Optional[~datetime.datetime] + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -226,7 +232,11 @@ def get_user_delegation_key( :return: The user delegation key. :rtype: ~azure.storage.fileshare.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(start), expiry=_to_utc_datetime(expiry)) + key_info = KeyInfo( + start=_to_utc_datetime(start), + expiry=_to_utc_datetime(expiry), + delegated_user_tid=delegated_user_tid + ) try: user_delegation_key = self._client.service.get_user_delegation_key( # type: ignore key_info=key_info, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.pyi index ec010cfa29fa..0309635c7c18 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_service_client.pyi @@ -67,6 +67,7 @@ class ShareServiceClient(StorageAccountHostsMixin): *, expiry: datetime, start: Optional[datetime] = None, + delegated_user_tid: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> UserDelegationKey: ... 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py index e8835e7ccb03..a5a3a789a425 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py @@ -89,6 +89,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED instead. INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. 
INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" @@ -153,11 +155,15 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # File values CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" DELETE_PENDING = "DeletePending" DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" FILE_LOCK_CONFLICT = "FileLockConflict" FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_BANDWIDTH_INVALID = "FileShareProvisionedBandwidthInvalid" FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_INVALID = "FileShareProvisionedIopsInvalid" + FILE_SHARE_PROVISIONED_STORAGE_INVALID = "FileShareProvisionedStorageInvalid" INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" PARENT_NOT_FOUND = "ParentNotFound" READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" @@ -171,7 +177,10 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): SHARE_SNAPSHOT_NOT_FOUND = "ShareSnapshotNotFound" SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" - CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" + TOTAL_SHARES_PROVISIONED_CAPACITY_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedCapacityExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_IOPS_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedIopsExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_BANDWIDTH_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedBandwidthExceedsAccountLimit" + TOTAL_SHARES_COUNT_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesCountExceedsAccountLimit" # DataLake values CONTENT_LENGTH_MUST_BE_ZERO = "ContentLengthMustBeZero" @@ -517,6 +526,8 @@ class UserDelegationKey(object): 
"""Object ID of this token.""" signed_tid: Optional[str] = None """Tenant ID of the tenant that issued this token.""" + signed_delegated_user_tid: Optional[str] = None + """User Tenant ID of this token.""" signed_start: Optional[str] = None """The datetime this token becomes valid.""" signed_expiry: Optional[str] = None @@ -531,6 +542,7 @@ class UserDelegationKey(object): def __init__(self): self.signed_oid = None self.signed_tid = None + self.signed_delegated_user_tid = None self.signed_start = None self.signed_expiry = None self.signed_service = None diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/response_handlers.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/response_handlers.py index 750838e3129a..9a079c56404f 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/response_handlers.py @@ -201,6 +201,7 @@ def parse_to_internal_user_delegation_key(service_user_delegation_key): internal_user_delegation_key = UserDelegationKey() internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_delegated_user_tid = service_user_delegation_key.signed_delegated_user_tid internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/shared_access_signature.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/shared_access_signature.py index aa16d249e070..9ddcf5c43463 100644 --- 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/shared_access_signature.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/shared_access_signature.py @@ -42,6 +42,8 @@ class QueryStringConstants(object): SIGNED_KEY_SERVICE = "sks" SIGNED_KEY_VERSION = "skv" SIGNED_ENCRYPTION_SCOPE = "ses" + SIGNED_REQUEST_HEADERS = "srh" + SIGNED_REQUEST_QUERY_PARAMS = "srq" SIGNED_KEY_DELEGATED_USER_TID = "skdutid" SIGNED_DELEGATED_USER_OID = "sduoid" @@ -81,6 +83,8 @@ def to_list(): QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, + QueryStringConstants.SIGNED_REQUEST_HEADERS, + QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, QueryStringConstants.SIGNED_DELEGATED_USER_OID, # for ADLS @@ -178,6 +182,10 @@ def __init__(self): self.query_dict = {} self.string_to_sign = "" + # STS-only values for dynamic user delegation SAS + self._sts_srh = "" # newline-delimited "k:v" + trailing newline (or empty) + self._sts_srq = "" # newline-delimited "k:v" + leading newline (or empty) + def _add_query(self, name, val): if val: self.query_dict[name] = str(val) if val is not None else None @@ -218,6 +226,28 @@ def add_override_response_headers( self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + def add_request_headers(self, request_headers): + if not request_headers: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srh = "\n".join([f"{k}:{v}" for k, v in request_headers.items()]) + "\n" + + # SAS query param: comma-separated list of encoded header keys only + srh_keys = ",".join([url_quote(k) for k in request_headers.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_HEADERS, srh_keys) + + def add_request_query_params(self, request_query_params): + if not 
request_query_params: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srq = "\n" + "\n".join([f"{k}:{v}" for k, v in request_query_params.items()]) + + # SAS query param: comma-separated list of encoded query-param keys only + srq_keys = ",".join([url_quote(k) for k in request_query_params.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, srq_keys) + def add_account_signature(self, account_name, account_key): def get_value_to_append(query): return_value = self.query_dict.get(query) or "" diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared_access_signature.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared_access_signature.py index f64042af5a77..6880a52022a8 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared_access_signature.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared_access_signature.py @@ -292,6 +292,10 @@ def get_value_to_append(query): self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) + self._add_query( + QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, + user_delegation_key.signed_delegated_user_tid + ) string_to_sign += \ (get_value_to_append(QueryStringConstants.SIGNED_OID) + diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py index aefaa444565c..4fc04bb91d26 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py @@ -137,11 +137,15 @@ def __init__( 
self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) async def __aenter__(self) -> Self: await self._client.__aenter__() diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py index 0de3e70eae2c..75d77c35cc1f 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py @@ -13,7 +13,7 @@ from io import BytesIO from typing import ( Any, AnyStr, AsyncGenerator, AsyncIterable, Callable, cast, - Dict, IO, Iterable, List, Optional, Tuple, Union, + Dict, IO, Iterable, List, Literal, Optional, Tuple, Union, TYPE_CHECKING ) from typing_extensions import Self @@ -51,11 +51,6 @@ from ._lease_async import ShareLeaseClient from ._models import FileProperties, Handle, HandlesPaged -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential from azure.core.credentials_async import AsyncTokenCredential @@ -201,11 +196,15 @@ def __init__( 
self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) async def __aenter__(self) -> Self: await self._client.__aenter__() @@ -493,7 +492,7 @@ async def create_file( @distributed_trace_async async def upload_file( - self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], + self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], length: Optional[int] = None, file_attributes: Optional[Union[str, "NTFSAttributes"]] = None, file_creation_time: Optional[Union[str, datetime]] = None, @@ -506,9 +505,9 @@ async def upload_file( :param data: Content of the file. - :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]] :param int length: - Length of the file in bytes. Specify its maximum size, up to 1 TiB. + Length of the file in bytes. :param file_attributes: The file system attributes for files and directories. If not set, the default value would be "None" and the attributes will be set to "Archive". 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi index fd205f9c9e80..3218ca795360 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi @@ -139,7 +139,7 @@ class ShareFileClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): @distributed_trace_async async def upload_file( self, - data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]], + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], length: Optional[int] = None, file_attributes: Optional[Union[str, NTFSAttributes]] = None, file_creation_time: Optional[Union[str, datetime]] = None, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py index a82ca45642c8..2d2cdaf0a3f0 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py @@ -132,11 +132,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + 
allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) async def __aenter__(self) -> Self: await self._client.__aenter__() diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py index 0af8457c874f..048b8973994c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.py @@ -127,11 +127,15 @@ def __init__( self.allow_trailing_dot = kwargs.pop('allow_trailing_dot', None) self.allow_source_trailing_dot = kwargs.pop('allow_source_trailing_dot', None) self.file_request_intent = token_intent - self._client = AzureFileStorage(url=self.url, base_url=self.url, pipeline=self._pipeline, - allow_trailing_dot=self.allow_trailing_dot, - allow_source_trailing_dot=self.allow_source_trailing_dot, - file_request_intent=self.file_request_intent) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client = AzureFileStorage( + version=get_api_version(kwargs), + url=self.url, + base_url=self.url, + pipeline=self._pipeline, + allow_trailing_dot=self.allow_trailing_dot, + allow_source_trailing_dot=self.allow_source_trailing_dot, + file_request_intent=self.file_request_intent + ) async def __aenter__(self) -> Self: await self._client.__aenter__() @@ -206,6 +210,7 @@ async def get_user_delegation_key( *, expiry: "datetime", start: Optional["datetime"] = None, + delegated_user_tid: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> "UserDelegationKey": @@ -220,6 +225,7 @@ async def get_user_delegation_key( :keyword start: A DateTime value. Indicates when the key becomes valid. 
:paramtype start: Optional[~datetime.datetime] + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -229,7 +235,11 @@ async def get_user_delegation_key( :return: The user delegation key. :rtype: ~azure.storage.queue.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(start), expiry=_to_utc_datetime(expiry)) + key_info = KeyInfo( + start=_to_utc_datetime(start), + expiry=_to_utc_datetime(expiry), + delegated_user_tid=delegated_user_tid + ) try: user_delegation_key = await self._client.service.get_user_delegation_key( # type: ignore key_info=key_info, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.pyi index 5ba4fbe21eb3..bcd04d6f9c85 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_service_client_async.pyi @@ -70,6 +70,7 @@ class ShareServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin *, expiry: datetime, start: Optional[datetime] = None, + delegated_user_tid: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> UserDelegationKey: ... 
diff --git a/sdk/storage/azure-storage-file-share/setup.py b/sdk/storage/azure-storage-file-share/setup.py index c2bd79bb1c36..2e62c6db6a86 100644 --- a/sdk/storage/azure-storage-file-share/setup.py +++ b/sdk/storage/azure-storage-file-share/setup.py @@ -66,14 +66,14 @@ ]), python_requires=">=3.9", install_requires=[ - "azure-core>=1.30.0", + "azure-core>=1.37.0", "cryptography>=2.1.4", "typing-extensions>=4.6.0", "isodate>=0.6.1" ], extras_require={ "aio": [ - "azure-core[aio]>=1.30.0", + "azure-core[aio]>=1.37.0", ], }, ) diff --git a/sdk/storage/azure-storage-file-share/swagger/README.md b/sdk/storage/azure-storage-file-share/swagger/README.md index 7fc4edc102e8..6910c5517eca 100644 --- a/sdk/storage/azure-storage-file-share/swagger/README.md +++ b/sdk/storage/azure-storage-file-share/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2026-02-06/file.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2026-04-06/file.json output-folder: ../azure/storage/fileshare/_generated namespace: azure.storage.fileshare no-namespace-folders: true diff --git a/sdk/storage/azure-storage-file-share/tests/test_share.py b/sdk/storage/azure-storage-file-share/tests/test_share.py index e85ba972a348..3b6572837a68 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_share.py +++ b/sdk/storage/azure-storage-file-share/tests/test_share.py @@ -1904,10 +1904,9 @@ def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @FileSharePreparer() - def test_share_user_delegation_oid(self, **kwargs): + def test_share_cross_tenant_sas(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - data = b"abc123" 
self._setup(storage_account_name, storage_account_key) token_credential = self.get_credential(ShareServiceClient) @@ -1918,15 +1917,25 @@ def test_share_user_delegation_oid(self, **kwargs): ) start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = service.get_user_delegation_key(start=start, expiry=expiry) token = token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = service.get_user_delegation_key( + start=start, + expiry=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid share_name = self.get_resource_name("oauthshare") directory_name = self.get_resource_name("oauthdir") file_name = self.get_resource_name("oauthfile") share = service.create_share(share_name) directory = share.create_directory(directory_name) + data = b"abc123" file = directory.upload_file(file_name, data, length=len(data)) share_token = self.generate_sas( @@ -1939,7 +1948,9 @@ def test_share_user_delegation_oid(self, **kwargs): user_delegation_key=user_delegation_key, user_delegation_oid=user_delegation_oid ) + assert "sduoid=" + user_delegation_oid in share_token + assert "skdutid=" + delegated_user_tid in share_token share_client = ShareClient.from_share_url( f"{share.url}?{share_token}", @@ -1960,7 +1971,9 @@ def test_share_user_delegation_oid(self, **kwargs): user_delegation_key=user_delegation_key, user_delegation_oid=user_delegation_oid ) + assert "sduoid=" + user_delegation_oid in file_token + assert "skdutid=" + delegated_user_tid in file_token file_client = ShareFileClient.from_file_url( f"{file.url}?{file_token}", diff --git 
a/sdk/storage/azure-storage-file-share/tests/test_share_async.py b/sdk/storage/azure-storage-file-share/tests/test_share_async.py index 4283eda52b41..5face0374f45 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_share_async.py +++ b/sdk/storage/azure-storage-file-share/tests/test_share_async.py @@ -1943,10 +1943,9 @@ async def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @FileSharePreparer() - async def test_share_user_delegation_oid(self, **kwargs): + async def test_share_cross_tenant_sas(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - data = b"abc123" self._setup(storage_account_name, storage_account_key) token_credential = self.get_credential(ShareServiceClient, is_async=True) @@ -1957,15 +1956,25 @@ async def test_share_user_delegation_oid(self, **kwargs): ) start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = await service.get_user_delegation_key(start=start, expiry=expiry) token = await token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = await service.get_user_delegation_key( + start=start, + expiry=expiry, + delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid share_name = self.get_resource_name("oauthshare") directory_name = self.get_resource_name("oauthdir") file_name = self.get_resource_name("oauthfile") share = await service.create_share(share_name) directory = await share.create_directory(directory_name) + data = b"abc123" file = await directory.upload_file(file_name, data, length=len(data)) 
 share_token = self.generate_sas( @@ -1978,7 +1987,9 @@ async def test_share_user_delegation_oid(self, **kwargs): user_delegation_key=user_delegation_key, user_delegation_oid=user_delegation_oid ) + assert "sduoid=" + user_delegation_oid in share_token + assert "skdutid=" + delegated_user_tid in share_token share_client = ShareClient.from_share_url( f"{share.url}?{share_token}", @@ -2001,7 +2012,9 @@ async def test_share_user_delegation_oid(self, **kwargs): user_delegation_key=user_delegation_key, user_delegation_oid=user_delegation_oid ) + assert "sduoid=" + user_delegation_oid in file_token + assert "skdutid=" + delegated_user_tid in file_token file_client = ShareFileClient.from_file_url( f"{file.url}?{file_token}", diff --git a/sdk/storage/azure-storage-queue/CHANGELOG.md b/sdk/storage/azure-storage-queue/CHANGELOG.md index b48d95e54996..e6b4a2b5286d 100644 --- a/sdk/storage/azure-storage-queue/CHANGELOG.md +++ b/sdk/storage/azure-storage-queue/CHANGELOG.md @@ -1,8 +1,15 @@ # Release History -## 12.16.0b1 (Unreleased) +## 12.16.0b1 (2026-01-27) ### Features Added +- Added support for service version 2026-04-06. +- Added support for the keyword `delegated_user_tid` to `QueueServiceClient.get_user_delegation_key` API, which +can be used in `generate_queue_sas` to specify the Tenant ID that is authorized to use the generated SAS URL. +Note that `delegated_user_tid` must be used together with `user_delegation_oid`. + +### Other Changes +- Bumped minimum `azure-core` dependency to 1.37.0. 
## 12.15.0 (2026-01-06) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py index 4f06f952c248..8ef85723b820 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py @@ -34,17 +34,16 @@ class AzureQueueStorage: # pylint: disable=client-accepts-api-version-keyword :param url: The URL of the service account, queue or message that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any + self, url: str, version: str, base_url: str = "", **kwargs: Any ) -> None: - self._config = AzureQueueStorageConfiguration(url=url, **kwargs) + self._config = AzureQueueStorageConfiguration(url=url, version=version, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py index 206fac3c2078..04adef0da253 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Literal +from typing import Any from azure.core.pipeline import policies @@ -22,16 +22,15 @@ class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attri :param url: The URL of the service account, queue or message that is the target of the desired operation. Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") - + def __init__(self, url: str, version: str, **kwargs: Any) -> None: if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url self.version = version diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py index ff543ed937ff..6da830e0cf4a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py index 4d3d8146ce36..3b6b202768e2 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py @@ -34,17 +34,16 @@ class AzureQueueStorage: # pylint: disable=client-accepts-api-version-keyword :param url: The URL of the service account, queue or message 
that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", **kwargs: Any + self, url: str, version: str, base_url: str = "", **kwargs: Any ) -> None: - self._config = AzureQueueStorageConfiguration(url=url, **kwargs) + self._config = AzureQueueStorageConfiguration(url=url, version=version, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py index 8480a6cf30c1..1c90497920fe 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Literal +from typing import Any from azure.core.pipeline import policies @@ -22,16 +22,15 @@ class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attri :param url: The URL of the service account, queue or message that is the target of the desired operation. Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". 
Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") - + def __init__(self, url: str, version: str, **kwargs: Any) -> None: if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url self.version = version diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py index 8c7dd2ffbae0..997087907907 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py @@ -114,10 +114,10 @@ async def update( url=self._config.url, pop_receipt=pop_receipt, visibilitytimeout=visibilitytimeout, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -188,9 +188,9 @@ async def delete( _request = build_delete_request( url=self._config.url, pop_receipt=pop_receipt, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py index 1eb4f013416d..9fd47e1b4007 100644 --- 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py @@ -105,11 +105,11 @@ async def dequeue( _request = build_dequeue_request( url=self._config.url, + version=self._config.version, number_of_messages=number_of_messages, visibilitytimeout=visibilitytimeout, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -175,9 +175,9 @@ async def clear( _request = build_clear_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -267,12 +267,12 @@ async def enqueue( _request = build_enqueue_request( url=self._config.url, + version=self._config.version, visibilitytimeout=visibilitytimeout, message_time_to_live=message_time_to_live, timeout=timeout, request_id_parameter=request_id_parameter, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -350,11 +350,11 @@ async def peek( _request = build_peek_request( url=self._config.url, + version=self._config.version, number_of_messages=number_of_messages, timeout=timeout, request_id_parameter=request_id_parameter, peekonly=peekonly, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py index 87f58d055aa6..76787fa67345 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py @@ -101,10 +101,10 @@ async def create( _request = 
build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -166,9 +166,9 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -232,10 +232,10 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -313,11 +313,11 @@ async def set_metadata( _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -381,10 +381,10 @@ async def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -467,11 +467,11 @@ async def set_access_policy( _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py index e083a7710c0c..54a8f75be779 100644 --- 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py @@ -102,12 +102,12 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -172,11 +172,11 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -245,11 +245,11 @@ async def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -327,12 +327,12 @@ async def get_user_delegation_key( _request = build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -430,6 +430,7 @@ async def list_queues_segment( _request = build_list_queues_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -437,7 +438,6 @@ async def list_queues_segment( timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py index c8da9843b240..825f04418cf0 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py @@ -346,6 +346,8 @@ class KeyInfo(_serialization.Model): :vartype start: str :ivar expiry: The date-time the key expires in ISO 8601 UTC time. Required. :vartype expiry: str + :ivar delegated_user_tid: The delegated user tenant id in Azure AD. + :vartype delegated_user_tid: str """ _validation = { @@ -355,18 +357,24 @@ class KeyInfo(_serialization.Model): _attribute_map = { "start": {"key": "Start", "type": "str"}, "expiry": {"key": "Expiry", "type": "str"}, + "delegated_user_tid": {"key": "DelegatedUserTid", "type": "str"}, } - def __init__(self, *, expiry: str, start: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, *, expiry: str, start: Optional[str] = None, delegated_user_tid: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword start: The date-time the key is active in ISO 8601 UTC time. :paramtype start: str :keyword expiry: The date-time the key expires in ISO 8601 UTC time. Required. :paramtype expiry: str + :keyword delegated_user_tid: The delegated user tenant id in Azure AD. + :paramtype delegated_user_tid: str """ super().__init__(**kwargs) self.start = start self.expiry = expiry + self.delegated_user_tid = delegated_user_tid class ListQueuesSegmentResponse(_serialization.Model): @@ -867,6 +875,9 @@ class UserDelegationKey(_serialization.Model): :vartype signed_service: str :ivar signed_version: The service version that created the key. Required. :vartype signed_version: str + :ivar signed_delegated_user_tid: The delegated user tenant id in Azure AD. Return if + DelegatedUserTid is specified. 
+ :vartype signed_delegated_user_tid: str :ivar value: The key as a base64 string. Required. :vartype value: str """ @@ -888,6 +899,7 @@ class UserDelegationKey(_serialization.Model): "signed_expiry": {"key": "SignedExpiry", "type": "iso-8601"}, "signed_service": {"key": "SignedService", "type": "str"}, "signed_version": {"key": "SignedVersion", "type": "str"}, + "signed_delegated_user_tid": {"key": "SignedDelegatedUserTid", "type": "str"}, "value": {"key": "Value", "type": "str"}, } @@ -901,6 +913,7 @@ def __init__( signed_service: str, signed_version: str, value: str, + signed_delegated_user_tid: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -917,6 +930,9 @@ def __init__( :paramtype signed_service: str :keyword signed_version: The service version that created the key. Required. :paramtype signed_version: str + :keyword signed_delegated_user_tid: The delegated user tenant id in Azure AD. Return if + DelegatedUserTid is specified. + :paramtype signed_delegated_user_tid: str :keyword value: The key as a base64 string. Required. :paramtype value: str """ @@ -927,4 +943,5 @@ def __init__( self.signed_expiry = signed_expiry self.signed_service = signed_service self.signed_version = signed_version + self.signed_delegated_user_tid = signed_delegated_user_tid self.value = value diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py index 20bba7c5d720..39c22976afbe 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from collections.abc import MutableMapping -from typing import Any, Callable, Literal, Optional, TypeVar +from typing import Any, Callable, Optional, TypeVar from azure.core import PipelineClient from azure.core.exceptions import ( @@ -39,6 +39,7 @@ def build_update_request( *, pop_receipt: str, visibilitytimeout: int, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, content: Any = None, @@ -48,7 +49,6 @@ def build_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -82,6 +82,7 @@ def build_delete_request( url: str, *, pop_receipt: str, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any @@ -89,7 +90,6 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -196,10 +196,10 @@ def update( # pylint: disable=inconsistent-return-statements url=self._config.url, pop_receipt=pop_receipt, visibilitytimeout=visibilitytimeout, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -270,9 +270,9 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, pop_receipt=pop_receipt, + version=self._config.version, timeout=timeout, 
request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py index d6de48798fc3..290bef2a7776 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py @@ -37,6 +37,7 @@ def build_dequeue_request( url: str, *, + version: str, number_of_messages: Optional[int] = None, visibilitytimeout: Optional[int] = None, timeout: Optional[int] = None, @@ -46,7 +47,6 @@ def build_dequeue_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -77,12 +77,11 @@ def build_dequeue_request( def build_clear_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -110,6 +109,7 @@ def build_enqueue_request( url: str, *, content: Any, + version: str, visibilitytimeout: Optional[int] = None, message_time_to_live: Optional[int] = None, timeout: Optional[int] = None, @@ -120,7 +120,6 @@ def build_enqueue_request( _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -155,6 +154,7 @@ def build_enqueue_request( def build_peek_request( url: str, *, + version: str, number_of_messages: Optional[int] = None, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, @@ -164,7 +164,6 @@ def build_peek_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) peekonly: Literal["true"] = kwargs.pop("peekonly", _params.pop("peekonly", "true")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -259,11 +258,11 @@ def dequeue( _request = build_dequeue_request( url=self._config.url, + version=self._config.version, number_of_messages=number_of_messages, visibilitytimeout=visibilitytimeout, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -329,9 +328,9 @@ def clear( # pylint: disable=inconsistent-return-statements _request = build_clear_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -421,12 +420,12 @@ def enqueue( _request = build_enqueue_request( url=self._config.url, + version=self._config.version, visibilitytimeout=visibilitytimeout, message_time_to_live=message_time_to_live, timeout=timeout, request_id_parameter=request_id_parameter, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -504,11 +503,11 @@ def peek( _request = build_peek_request( url=self._config.url, + 
version=self._config.version, number_of_messages=number_of_messages, timeout=timeout, request_id_parameter=request_id_parameter, peekonly=peekonly, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py index 641a08c1fc09..fd4370c3801f 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py @@ -37,6 +37,7 @@ def build_create_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, request_id_parameter: Optional[str] = None, @@ -45,7 +46,6 @@ def build_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -72,12 +72,11 @@ def build_create_request( def build_delete_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -102,13 +101,12 @@ def build_delete_request( def build_get_properties_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: 
str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -136,6 +134,7 @@ def build_get_properties_request( def build_set_metadata_request( url: str, *, + version: str, timeout: Optional[int] = None, metadata: Optional[dict[str, str]] = None, request_id_parameter: Optional[str] = None, @@ -145,7 +144,6 @@ def build_set_metadata_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -173,13 +171,12 @@ def build_set_metadata_request( def build_get_access_policy_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -207,6 +204,7 @@ def build_get_access_policy_request( def build_set_access_policy_request( url: str, *, + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = 
None, content: Any = None, @@ -217,7 +215,6 @@ def build_set_access_policy_request( comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -306,10 +303,10 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -371,9 +368,9 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, - version=self._config.version, headers=_headers, params=_params, ) @@ -437,10 +434,10 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -518,11 +515,11 @@ def set_metadata( # pylint: disable=inconsistent-return-statements _request = build_set_metadata_request( url=self._config.url, + version=self._config.version, timeout=timeout, metadata=metadata, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -586,10 +583,10 @@ def get_access_policy( _request = build_get_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -672,11 +669,11 @@ 
def set_access_policy( # pylint: disable=inconsistent-return-statements _request = build_set_access_policy_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py index d496d3e1bb4d..b20f5f5d24bd 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py @@ -35,7 +35,13 @@ def build_set_properties_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, + *, + content: Any, + version: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -43,7 +49,6 @@ def build_set_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -72,14 +77,13 @@ def build_set_properties_request( def build_get_properties_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = 
None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -106,14 +110,13 @@ def build_get_properties_request( def build_get_statistics_request( - url: str, *, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, *, version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -140,7 +143,13 @@ def build_get_statistics_request( def build_get_user_delegation_key_request( - url: str, *, content: Any, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any + url: str, + *, + content: Any, + version: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -148,7 +157,6 @@ def build_get_user_delegation_key_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) 
comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -179,6 +187,7 @@ def build_get_user_delegation_key_request( def build_list_queues_segment_request( url: str, *, + version: str, prefix: Optional[str] = None, marker: Optional[str] = None, maxresults: Optional[int] = None, @@ -191,7 +200,6 @@ def build_list_queues_segment_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -288,12 +296,12 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -358,11 +366,11 @@ def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -431,11 +439,11 @@ def get_statistics( _request = build_get_statistics_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -513,12 +521,12 @@ def get_user_delegation_key( _request = 
build_get_user_delegation_key_request( url=self._config.url, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, restype=restype, comp=comp, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -616,6 +624,7 @@ def list_queues_segment( _request = build_list_queues_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, marker=marker, maxresults=maxresults, @@ -623,7 +632,6 @@ def list_queues_segment( timeout=timeout, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py index 85f7b7c6c93a..7812bffb9602 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py @@ -119,8 +119,9 @@ def __init__( ) self._message_encode_policy = message_encode_policy or NoEncodePolicy() self._message_decode_policy = message_decode_policy or NoDecodePolicy() - self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(api_version) # type: ignore [assignment] + self._client = AzureQueueStorage( + self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline + ) self._configure_encryption(kwargs) def __enter__(self) -> Self: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py index 090b2077aa4e..4c2004122960 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py @@ -116,8 +116,9 @@ def __init__( audience=audience, 
**kwargs, ) - self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(api_version) # type: ignore [assignment] + self._client = AzureQueueStorage( + self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline + ) self._configure_encryption(kwargs) def __enter__(self) -> Self: @@ -211,7 +212,13 @@ def from_connection_string( @distributed_trace def get_user_delegation_key( - self, *, expiry: "datetime", start: Optional["datetime"] = None, timeout: Optional[int] = None, **kwargs: Any + self, + *, + expiry: "datetime", + start: Optional["datetime"] = None, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any, ) -> "UserDelegationKey": """ Obtain a user delegation key for the purpose of signing SAS tokens. @@ -223,6 +230,7 @@ def get_user_delegation_key( :keyword start: A DateTime value. Indicates when the key becomes valid. :paramtype start: Optional[~datetime.datetime] + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -232,7 +240,11 @@ def get_user_delegation_key( :return: The user delegation key. 
:rtype: ~azure.storage.queue.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(start), expiry=_to_utc_datetime(expiry)) # type: ignore + key_info = KeyInfo( + start=_to_utc_datetime(start), # type: ignore [arg-type] + expiry=_to_utc_datetime(expiry), + delegated_user_tid=delegated_user_tid, + ) try: user_delegation_key = self._client.service.get_user_delegation_key( key_info=key_info, timeout=timeout, **kwargs diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py index c6b023ba27e8..3a9b86f7b195 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py @@ -34,6 +34,7 @@ "2025-07-05", "2025-11-05", "2026-02-06", + "2026-04-06", ] diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py index a3c2fd2740cc..aa900a4f404a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py @@ -89,6 +89,8 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): COPY_ID_MISMATCH = "CopyIdMismatch" FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierSnapshotNotAllowed" + #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_SNAPSHOT_NOT_ALLOWED instead. INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" #: Deprecated: Please use INCREMENTAL_COPY_OF_EARLIER_VERSION_SNAPSHOT_NOT_ALLOWED instead. 
INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed" @@ -153,11 +155,15 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): # File values CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" DELETE_PENDING = "DeletePending" DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" FILE_LOCK_CONFLICT = "FileLockConflict" FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_BANDWIDTH_INVALID = "FileShareProvisionedBandwidthInvalid" FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed" + FILE_SHARE_PROVISIONED_IOPS_INVALID = "FileShareProvisionedIopsInvalid" + FILE_SHARE_PROVISIONED_STORAGE_INVALID = "FileShareProvisionedStorageInvalid" INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" PARENT_NOT_FOUND = "ParentNotFound" READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" @@ -171,7 +177,10 @@ class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): SHARE_SNAPSHOT_NOT_FOUND = "ShareSnapshotNotFound" SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" - CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" + TOTAL_SHARES_PROVISIONED_CAPACITY_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedCapacityExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_IOPS_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedIopsExceedsAccountLimit" + TOTAL_SHARES_PROVISIONED_BANDWIDTH_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesProvisionedBandwidthExceedsAccountLimit" + TOTAL_SHARES_COUNT_EXCEEDS_ACCOUNT_LIMIT = "TotalSharesCountExceedsAccountLimit" # DataLake values CONTENT_LENGTH_MUST_BE_ZERO = "ContentLengthMustBeZero" @@ -517,6 +526,8 @@ class UserDelegationKey(object): 
"""Object ID of this token.""" signed_tid: Optional[str] = None """Tenant ID of the tenant that issued this token.""" + signed_delegated_user_tid: Optional[str] = None + """User Tenant ID of this token.""" signed_start: Optional[str] = None """The datetime this token becomes valid.""" signed_expiry: Optional[str] = None @@ -531,6 +542,7 @@ class UserDelegationKey(object): def __init__(self): self.signed_oid = None self.signed_tid = None + self.signed_delegated_user_tid = None self.signed_start = None self.signed_expiry = None self.signed_service = None diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py index 705af63003d2..40faa840cfbf 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py @@ -204,6 +204,7 @@ def parse_to_internal_user_delegation_key(service_user_delegation_key): internal_user_delegation_key = UserDelegationKey() internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_delegated_user_tid = service_user_delegation_key.signed_delegated_user_tid internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py index 3bc715ea845b..b8582a8f71f4 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py +++ 
b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py @@ -42,6 +42,8 @@ class QueryStringConstants(object): SIGNED_KEY_SERVICE = "sks" SIGNED_KEY_VERSION = "skv" SIGNED_ENCRYPTION_SCOPE = "ses" + SIGNED_REQUEST_HEADERS = "srh" + SIGNED_REQUEST_QUERY_PARAMS = "srq" SIGNED_KEY_DELEGATED_USER_TID = "skdutid" SIGNED_DELEGATED_USER_OID = "sduoid" @@ -81,6 +83,8 @@ def to_list(): QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, + QueryStringConstants.SIGNED_REQUEST_HEADERS, + QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, QueryStringConstants.SIGNED_DELEGATED_USER_OID, # for ADLS @@ -186,6 +190,10 @@ def __init__(self): self.query_dict = {} self.string_to_sign = "" + # STS-only values for dynamic user delegation SAS + self._sts_srh = "" # newline-delimited "k:v" + trailing newline (or empty) + self._sts_srq = "" # newline-delimited "k:v" + leading newline (or empty) + def _add_query(self, name, val): if val: self.query_dict[name] = str(val) if val is not None else None @@ -226,6 +234,28 @@ def add_override_response_headers( self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + def add_request_headers(self, request_headers): + if not request_headers: + return + + # String-to-Sign (not encoded): "k1:v1\nk2:v2\n...kn:vn\n" + self._sts_srh = "\n".join([f"{k}:{v}" for k, v in request_headers.items()]) + "\n" + + # SAS query param: comma-separated list of encoded header keys only + srh_keys = ",".join([url_quote(k) for k in request_headers.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_HEADERS, srh_keys) + + def add_request_query_params(self, request_query_params): + if not request_query_params: + return + + # String-to-Sign (not encoded, leading newline): "\nk1:v1\nk2:v2\n...kn:vn" + self._sts_srq = "\n" + 
"\n".join([f"{k}:{v}" for k, v in request_query_params.items()]) + + # SAS query param: comma-separated list of encoded query-param keys only + srq_keys = ",".join([url_quote(k) for k in request_query_params.keys()]) + self._add_query(QueryStringConstants.SIGNED_REQUEST_QUERY_PARAMS, srq_keys) + def add_account_signature(self, account_name, account_key): def get_value_to_append(query): return_value = self.query_dict.get(query) or "" diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py index 368f76def122..465835d6ad69 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py @@ -153,6 +153,9 @@ def get_value_to_append(query): self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) + self._add_query( + QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, user_delegation_key.signed_delegated_user_tid + ) string_to_sign += ( get_value_to_append(QueryStringConstants.SIGNED_OID) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py index 6ba0cf3c2827..5134ea26a5ad 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py @@ -132,8 +132,9 @@ def __init__( self._message_encode_policy = message_encode_policy or NoEncodePolicy() self._message_decode_policy = message_decode_policy or NoDecodePolicy() - self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline, 
loop=loop) - self._client._config.version = get_api_version(api_version) # type: ignore [assignment] + self._client = AzureQueueStorage( + self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline, loop=loop + ) self._loop = loop self._configure_encryption(kwargs) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py index 228cd2af94c5..dfb65bfccd3a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py @@ -115,8 +115,9 @@ def __init__( audience=audience, **kwargs, ) - self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline, loop=loop) - self._client._config.version = get_api_version(api_version) # type: ignore [assignment] + self._client = AzureQueueStorage( + self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline, loop=loop + ) self._loop = loop self._configure_encryption(kwargs) @@ -208,7 +209,13 @@ def from_connection_string( @distributed_trace_async async def get_user_delegation_key( - self, *, expiry: "datetime", start: Optional["datetime"] = None, timeout: Optional[int] = None, **kwargs: Any + self, + *, + expiry: "datetime", + start: Optional["datetime"] = None, + delegated_user_tid: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any, ) -> "UserDelegationKey": """ Obtain a user delegation key for the purpose of signing SAS tokens. @@ -220,6 +227,7 @@ async def get_user_delegation_key( :keyword start: A DateTime value. Indicates when the key becomes valid. :paramtype start: Optional[~datetime.datetime] + :keyword str delegated_user_tid: The delegated user tenant id in Entra ID. :keyword int timeout: Sets the server-side timeout for the operation in seconds. 
For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. @@ -229,7 +237,11 @@ async def get_user_delegation_key( :return: The user delegation key. :rtype: ~azure.storage.queue.UserDelegationKey """ - key_info = KeyInfo(start=_to_utc_datetime(start), expiry=_to_utc_datetime(expiry)) # type: ignore + key_info = KeyInfo( + start=_to_utc_datetime(start), # type: ignore [arg-type] + expiry=_to_utc_datetime(expiry), + delegated_user_tid=delegated_user_tid, + ) try: user_delegation_key = await self._client.service.get_user_delegation_key( key_info=key_info, timeout=timeout, **kwargs diff --git a/sdk/storage/azure-storage-queue/setup.py b/sdk/storage/azure-storage-queue/setup.py index 36bcfbe34afc..ac028b43a7a5 100644 --- a/sdk/storage/azure-storage-queue/setup.py +++ b/sdk/storage/azure-storage-queue/setup.py @@ -69,10 +69,10 @@ ] ), python_requires=">=3.9", - install_requires=["azure-core>=1.30.0", "cryptography>=2.1.4", "typing-extensions>=4.6.0", "isodate>=0.6.1"], + install_requires=["azure-core>=1.37.0", "cryptography>=2.1.4", "typing-extensions>=4.6.0", "isodate>=0.6.1"], extras_require={ "aio": [ - "azure-core[aio]>=1.30.0", + "azure-core[aio]>=1.37.0", ], }, ) diff --git a/sdk/storage/azure-storage-queue/swagger/README.md b/sdk/storage/azure-storage-queue/swagger/README.md index 4ba30cd24a20..ea65326c9c2a 100644 --- a/sdk/storage/azure-storage-queue/swagger/README.md +++ b/sdk/storage/azure-storage-queue/swagger/README.md @@ -19,7 +19,7 @@ autorest --use=C:/work/autorest.python --version=2.0.4280 ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.QueueStorage/stable/2026-02-06/queue.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.QueueStorage/stable/2026-04-06/queue.json output-folder: ../azure/storage/queue/_generated namespace: 
azure.storage.queue no-namespace-folders: true diff --git a/sdk/storage/azure-storage-queue/tests/test_queue.py b/sdk/storage/azure-storage-queue/tests/test_queue.py index c9aabaaadf36..b010159d7490 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue.py @@ -1465,18 +1465,24 @@ def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @QueuePreparer() - def test_queue_user_delegation_oid(self, **kwargs): + def test_queue_cross_tenant_sas(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - message = "addedmessage" token_credential = self.get_credential(QueueServiceClient) qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), credential=token_credential) start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = qsc.get_user_delegation_key(start=start, expiry=expiry) token = token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = qsc.get_user_delegation_key( + start=start, expiry=expiry, delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid queue_name = self.get_resource_name(TEST_QUEUE_PREFIX) queue = qsc.get_queue_client(queue_name) @@ -1493,7 +1499,11 @@ def test_queue_user_delegation_oid(self, **kwargs): user_delegation_oid=user_delegation_oid, ) + assert "sduoid=" + user_delegation_oid in queue_token + assert "skdutid=" + delegated_user_tid in queue_token + queue_client = QueueClient.from_queue_url(queue_url=f"{queue.url}?{queue_token}", 
credential=token_credential) + message = "addedmessage" queue_msg = queue_client.send_message(message) assert queue_msg is not None diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_async.py index 3ff3bac1c53d..c79124c5ab6f 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_async.py @@ -1483,18 +1483,24 @@ async def test_get_user_delegation_sas(self, **kwargs): @pytest.mark.live_test_only @QueuePreparer() - async def test_queue_user_delegation_oid(self, **kwargs): + async def test_queue_cross_tenant_sas(self, **kwargs): storage_account_name = kwargs.pop("storage_account_name") storage_account_key = kwargs.pop("storage_account_key") - message = "addedmessage" token_credential = self.get_credential(QueueServiceClient, is_async=True) qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), credential=token_credential) start = datetime.utcnow() expiry = datetime.utcnow() + timedelta(hours=1) - user_delegation_key = await qsc.get_user_delegation_key(start=start, expiry=expiry) token = await token_credential.get_token("https://storage.azure.com/.default") - user_delegation_oid = jwt.decode(token.token, options={"verify_signature": False}).get("oid") + decoded = jwt.decode(token.token, options={"verify_signature": False}) + user_delegation_oid = decoded.get("oid") + delegated_user_tid = decoded.get("tid") + user_delegation_key = await qsc.get_user_delegation_key( + start=start, expiry=expiry, delegated_user_tid=delegated_user_tid + ) + + assert user_delegation_key is not None + assert user_delegation_key.signed_delegated_user_tid == delegated_user_tid queue_name = self.get_resource_name(TEST_QUEUE_PREFIX) queue = qsc.get_queue_client(queue_name) @@ -1511,7 +1517,11 @@ async def test_queue_user_delegation_oid(self, **kwargs): user_delegation_oid=user_delegation_oid, ) + assert "sduoid=" + user_delegation_oid in 
queue_token + assert "skdutid=" + delegated_user_tid in queue_token + queue_client = QueueClient.from_queue_url(queue_url=f"{queue.url}?{queue_token}", credential=token_credential) + message = "addedmessage" queue_msg = await queue_client.send_message(message) assert queue_msg is not None From d1955dd2680b79276b1b775adfc35ee08110c9ae Mon Sep 17 00:00:00 2001 From: Paul Van Eck Date: Mon, 26 Jan 2026 11:59:44 -0800 Subject: [PATCH 16/18] [Identity] TSG update for token binding (#44789) * [Identity] TSG update for token binding Signed-off-by: Paul Van Eck * Update formatting Signed-off-by: Paul Van Eck --------- Signed-off-by: Paul Van Eck --- sdk/identity/azure-identity/TROUBLESHOOTING.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sdk/identity/azure-identity/TROUBLESHOOTING.md b/sdk/identity/azure-identity/TROUBLESHOOTING.md index a8521ccaf28b..148f2e0544da 100644 --- a/sdk/identity/azure-identity/TROUBLESHOOTING.md +++ b/sdk/identity/azure-identity/TROUBLESHOOTING.md @@ -276,7 +276,13 @@ Get-AzAccessToken -ResourceUrl "https://management.core.windows.net" | Error Message |Description| Mitigation | |---|---|---| -|WorkloadIdentityCredential authentication unavailable. The workload options are not fully configured|The `WorkloadIdentityCredential` requires `client_id`, `tenant_id` and `token_file_path` to authenticate with Microsoft Entra ID.|
  • If using `DefaultAzureCredential` then:
    • Ensure client ID is specified via the `workload_identity_client_id` keyword argument or the `AZURE_CLIENT_ID` env variable.
    • Ensure tenant ID is specified via the `AZURE_TENANT_ID` env variable.
    • Ensure token file path is specified via `AZURE_FEDERATED_TOKEN_FILE` env variable.
    • Ensure authority host is specified via `AZURE_AUTHORITY_HOST` env variable.
  • If using `WorkloadIdentityCredential` then:
    • Ensure tenant ID is specified via the `tenant_id` keyword argument or the `AZURE_TENANT_ID` env variable.
    • Ensure client ID is specified via the `client_id` keyword argument or the `AZURE_CLIENT_ID` env variable.
    • Ensure token file path is specified via the `token_file_path` keyword argument or the `AZURE_FEDERATED_TOKEN_FILE` environment variable.
  • Consult the [product troubleshooting guide](https://azure.github.io/azure-workload-identity/docs/troubleshooting.html) for other issues.
+|WorkloadIdentityCredential authentication unavailable. The workload options are not fully configured|The `WorkloadIdentityCredential` requires `client_id`, `tenant_id` and `token_file_path` to authenticate with Microsoft Entra ID.|
  • If using `DefaultAzureCredential` then:
    • Ensure client ID is specified via the `workload_identity_client_id` keyword argument or the `AZURE_CLIENT_ID` env variable.
    • Ensure tenant ID is specified via the `AZURE_TENANT_ID` env variable.
    • Ensure token file path is specified via `AZURE_FEDERATED_TOKEN_FILE` env variable.
    • Ensure authority host is specified via `AZURE_AUTHORITY_HOST` env variable.
  • If using `WorkloadIdentityCredential` then:
    • Ensure tenant ID is specified via the `tenant_id` keyword argument or the `AZURE_TENANT_ID` env variable.
    • Ensure client ID is specified via the `client_id` keyword argument or the `AZURE_CLIENT_ID` env variable.
    • Ensure token file path is specified via the `token_file_path` keyword argument or the `AZURE_FEDERATED_TOKEN_FILE` environment variable.
  • Consult the [product troubleshooting guide](https://azure.github.io/azure-workload-identity/docs/troubleshooting.html) for other issues.
| + +#### `ClientAuthenticationError` for applications using [Azure Kubernetes Service identity bindings](https://learn.microsoft.com/azure/aks/identity-bindings-concepts) + +| Error Message |Description| Mitigation | +|---|---|---| +|
  • AADSTS700211: No matching federated identity record found for presented assertion issuer ...
  • AADSTS700212: No matching federated identity record found for presented assertion audience 'api://AKSIdentityBinding'.
|`WorkloadIdentityCredential` isn't configured to use the identity binding proxy|Set the `enable_azure_proxy` keyword argument to `True` when creating `WorkloadIdentityCredential`. Note that identity binding mode isn't supported when `WorkloadIdentityCredential` is used via `DefaultAzureCredential`. `WorkloadIdentityCredential` should be used directly in this scenario.| ## Troubleshoot `AzurePipelinesCredential` authentication issues From 0387213dd7e439f73162409f31a2d3091e4fd1fc Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 26 Jan 2026 21:49:56 +0000 Subject: [PATCH 17/18] Initial plan From a8c006647e50aec956ccab74cca2b9e7173ad393 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 26 Jan 2026 21:58:04 +0000 Subject: [PATCH 18/18] Add placeholder files for _operations to fix import structure Co-authored-by: l0lawrence <100643745+l0lawrence@users.noreply.github.com> --- .../storage/blobs/_operations/_operations.py | 0 .../azure/storage/blobs/_operations/_patch.py | 15 +++++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_operations.py create mode 100644 sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_patch.py diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_operations.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_patch.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_patch.py new file mode 100644 index 000000000000..7c2a7b429694 --- /dev/null +++ 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/azure/storage/blobs/_operations/_patch.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass + + +__all__ = []