From 0ae6a0aa36dd900fcfdd67b1a868a1d88c6fb964 Mon Sep 17 00:00:00 2001 From: Brynn Yin <24237253+brynn-code@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:29:42 +0800 Subject: [PATCH] Extract clean workspace connection provider (#2411) # Description Please add an informative description that covers the changes made by the pull request and link all relevant issues. # All Promptflow Contribution checklist: - [ ] **The pull request does not introduce [breaking changes].** - [ ] **CHANGELOG is updated for new features, bug fixes or other significant changes.** - [ ] **I have read the [contribution guidelines](../CONTRIBUTING.md).** - [ ] **Create an issue and link to the pull request to get dedicated review from promptflow team. Learn more: [suggested workflow](../CONTRIBUTING.md#suggested-workflow).** ## General Guidelines and Best Practices - [ ] Title of the pull request is clear and informative. - [ ] There are a small number of commits, each of which has an informative message. This means that previously merged commits do not appear in the history of the PR. For more information on cleaning up the commits in your PR, [see this page](https://github.com/Azure/azure-powershell/blob/master/documentation/development-docs/cleaning-up-commits.md). ### Testing Guidelines - [ ] Pull request includes test coverage for the included changes. 
--------- Signed-off-by: Brynn Yin --- .cspell.json | 1 + .pre-commit-config.yaml | 2 +- setup.cfg | 1 + .../promptflow/_sdk/entities/_connection.py | 31 +- .../promptflow/azure/_utils/general.py | 32 +- .../operations/_arm_connection_operations.py | 323 +- src/promptflow/promptflow/core/_connection.py | 37 +- .../core/_connection_provider/__init__.py | 5 + .../_connection_provider.py | 17 + .../_dict_connection_provider.py | 82 + .../_local_connection_provider.py | 14 + .../_connection_provider}/_models/__init__.py | 0 .../_connection_provider}/_models/_models.py | 19178 +++++++--------- .../_connection_provider}/_models/_version.py | 0 .../core/_connection_provider/_utils.py | 48 + .../_workspace_connection_provider.py | 303 + src/promptflow/promptflow/core/_errors.py | 48 + .../_serving/extension/azureml_extension.py | 6 +- .../test_arm_connection_operations.py | 22 +- .../unittests/test_arm_connection_build.py | 6 +- src/promptflow/tests/sdk_cli_test/.coveragerc | 3 + src/promptflow/tests/sdk_pfs_test/.coveragerc | 2 + 22 files changed, 9205 insertions(+), 10956 deletions(-) create mode 100644 src/promptflow/promptflow/core/_connection_provider/__init__.py create mode 100644 src/promptflow/promptflow/core/_connection_provider/_connection_provider.py create mode 100644 src/promptflow/promptflow/core/_connection_provider/_dict_connection_provider.py create mode 100644 src/promptflow/promptflow/core/_connection_provider/_local_connection_provider.py rename src/promptflow/promptflow/{azure => core/_connection_provider}/_models/__init__.py (100%) rename src/promptflow/promptflow/{azure => core/_connection_provider}/_models/_models.py (68%) rename src/promptflow/promptflow/{azure => core/_connection_provider}/_models/_version.py (100%) create mode 100644 src/promptflow/promptflow/core/_connection_provider/_utils.py create mode 100644 src/promptflow/promptflow/core/_connection_provider/_workspace_connection_provider.py diff --git a/.cspell.json b/.cspell.json index 
e9f5b8f1bd3..91b02115b2a 100644 --- a/.cspell.json +++ b/.cspell.json @@ -28,6 +28,7 @@ "src/promptflow/promptflow/azure/_restclient/flow/**", "src/promptflow/promptflow/azure/_restclient/swagger.json", "src/promptflow/promptflow/azure/_models/**", + "src/promptflow/promptflow/core/_connection_provider/_models/**", "src/promptflow/tests/**", "src/promptflow-tools/tests/**", "**/flow.dag.yaml", diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7637042d9d5..c66a59b8653 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks -exclude: '(^docs/)|flows|scripts|src/promptflow/promptflow/azure/_restclient/|src/promptflow/promptflow/azure/_models/|src/promptflow/tests/test_configs|src/promptflow-tools' +exclude: '(^docs/)|flows|scripts|src/promptflow/promptflow/azure/_restclient/|src/promptflow/promptflow/core/_connection_provider/_models/|src/promptflow/promptflow/azure/_models/|src/promptflow/tests/test_configs|src/promptflow-tools' repos: - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/setup.cfg b/setup.cfg index 74a0db43987..bf51b4b62f8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,6 +13,7 @@ exclude = build src/promptflow/promptflow/azure/_restclient src/promptflow/promptflow/azure/_models + src/promptflow/promptflow/core/_connection_provider/_models src/promptflow/tests/test_configs/* import-order-style = google diff --git a/src/promptflow/promptflow/_sdk/entities/_connection.py b/src/promptflow/promptflow/_sdk/entities/_connection.py index 3ac71f0d401..7b3cfda28d5 100644 --- a/src/promptflow/promptflow/_sdk/entities/_connection.py +++ b/src/promptflow/promptflow/_sdk/entities/_connection.py @@ -66,6 +66,8 @@ class _Connection(_CoreConnection, YAMLTranslatableMixin): + SUPPORTED_TYPES = {} + @classmethod def _casting_type(cls, typ): type_dict = { @@ -133,10 +135,11 @@ def _resolve_cls_and_type(cls, 
data, params_override=None): if type_str is None: raise ValidationException("type is required for connection.") type_str = cls._casting_type(type_str) - type_cls = _supported_types.get(type_str) + type_cls = cls.SUPPORTED_TYPES.get(type_str) if type_cls is None: raise ValidationException( - f"connection_type {type_str!r} is not supported. Supported types are: {list(_supported_types.keys())}" + f"Connection type {type_str!r} is not supported. " + f"Supported types are: {list(cls.SUPPORTED_TYPES.keys())}" ) return type_cls, type_str @@ -208,26 +211,6 @@ def _load( ) return connection - def _to_execution_connection_dict(self) -> dict: - value = {**self.configs, **self.secrets} - secret_keys = list(self.secrets.keys()) - return { - "type": self.class_name, # Required class name for connection in executor - "module": self.module, - "value": {k: v for k, v in value.items() if v is not None}, # Filter None value out - "secret_keys": secret_keys, - } - - @classmethod - def _from_execution_connection_dict(cls, name, data) -> "_Connection": - type_cls, _ = cls._resolve_cls_and_type(data={"type": data.get("type")[: -len("Connection")]}) - value_dict = data.get("value", {}) - if type_cls == CustomConnection: - secrets = {k: v for k, v in value_dict.items() if k in data.get("secret_keys", [])} - configs = {k: v for k, v in value_dict.items() if k not in secrets} - return CustomConnection(name=name, configs=configs, secrets=secrets) - return type_cls(name=name, **value_dict) - def _get_scrubbed_secrets(self): """Return the scrubbed secrets of connection.""" return {key: val for key, val in self.secrets.items() if self._is_scrubbed_value(val)} @@ -550,7 +533,9 @@ def _from_mt_rest_object(cls, mt_rest_obj): ) -_supported_types = { +# Note: Do not import this from core connection. +# As we need the class here. 
+_Connection.SUPPORTED_TYPES = { v.TYPE: v for v in globals().values() if isinstance(v, type) and issubclass(v, _Connection) and not v.__name__.startswith("_") diff --git a/src/promptflow/promptflow/azure/_utils/general.py b/src/promptflow/promptflow/azure/_utils/general.py index 7196dc1e4dc..8785801dd7f 100644 --- a/src/promptflow/promptflow/azure/_utils/general.py +++ b/src/promptflow/promptflow/azure/_utils/general.py @@ -4,33 +4,16 @@ import jwt -from promptflow.exceptions import ValidationException +from promptflow.core._connection_provider._utils import get_arm_token, get_token def is_arm_id(obj) -> bool: return isinstance(obj, str) and obj.startswith("azureml://") -def get_token(credential, resource) -> str: - from azure.ai.ml._azure_environments import _resource_to_scopes - - azure_ml_scopes = _resource_to_scopes(resource) - token = credential.get_token(*azure_ml_scopes).token - # validate token has aml audience - decoded_token = jwt.decode( - token, - options={"verify_signature": False, "verify_aud": False}, - ) - if decoded_token.get("aud") != resource: - msg = """AAD token with aml scope could not be fetched using the credentials being used. - Please validate if token with {0} scope can be fetched using credentials provided to PFClient. 
- Token with {0} scope can be fetched using credentials.get_token({0}) - """ - raise ValidationException( - message=msg.format(*azure_ml_scopes), - ) - - return token +# Add for backward compitability +get_token = get_token +get_arm_token = get_arm_token def get_aml_token(credential) -> str: @@ -40,13 +23,6 @@ def get_aml_token(credential) -> str: return get_token(credential, resource) -def get_arm_token(credential) -> str: - from azure.ai.ml._azure_environments import _get_base_url_from_metadata - - resource = _get_base_url_from_metadata() - return get_token(credential, resource) - - def get_authorization(credential=None) -> str: token = get_arm_token(credential=credential) return "Bearer " + token diff --git a/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py b/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py index 623533d7f96..81ec71b46c6 100644 --- a/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py +++ b/src/promptflow/promptflow/azure/operations/_arm_connection_operations.py @@ -1,59 +1,17 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -from typing import Any, Dict, Union +from typing import Dict -import requests from azure.ai.ml._scope_dependent_operations import ( OperationConfig, OperationsContainer, OperationScope, _ScopeDependentOperations, ) -from azure.core.exceptions import ClientAuthenticationError -from promptflow._constants import ConnectionAuthMode -from promptflow._sdk.entities._connection import CustomConnection, _Connection -from promptflow._utils.retry_utils import http_retry_wrapper -from promptflow.azure._models._models import WorkspaceConnectionPropertiesV2BasicResource from promptflow.azure._restclient.flow_service_caller import FlowServiceCaller -from promptflow.azure._utils.general import get_arm_token -from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException - -GET_CONNECTION_URL = ( - "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices" - "/workspaces/{ws}/connections/{name}/listsecrets?api-version=2023-04-01-preview" -) -LIST_CONNECTION_URL = ( - "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices" - "/workspaces/{ws}/connections?api-version=2023-04-01-preview" -) -FLOW_META_PREFIX = "azureml.flow." - - -# Note: We define the category and auth type here because newly added enum values may -# depend on azure-ai-ml package update, which is not in our control. 
-class ConnectionCategory: - AzureOpenAI = "AzureOpenAI" - CognitiveSearch = "CognitiveSearch" - CognitiveService = "CognitiveService" - CustomKeys = "CustomKeys" - OpenAI = "OpenAI" - Serp = "Serp" - Serverless = "Serverless" - BingLLMSearch = "BingLLMSearch" - - -class ConnectionAuthType: - ApiKey = "ApiKey" - AAD = "AAD" - - -def get_case_insensitive_key(d, key, default=None): - for k, v in d.items(): - if k.lower() == key.lower(): - return v - return default +from promptflow.core._connection_provider._workspace_connection_provider import WorkspaceConnectionProvider class ArmConnectionOperations(_ScopeDependentOperations): @@ -77,281 +35,24 @@ def __init__( self._all_operations = all_operations self._service_caller = service_caller self._credential = credential - - def get(self, name, **kwargs): - connection_dict = self.build_connection_dict(name) - return _Connection._from_execution_connection_dict(name=name, data=connection_dict) - - @classmethod - def _direct_get(cls, name, subscription_id, resource_group_name, workspace_name, credential): - """ - This method is added for local pf_client with workspace provider to ensure we only require limited - permission(workspace/list secrets). As create azure pf_client requires workspace read permission. - """ - connection_dict = cls._build_connection_dict( - name, subscription_id, resource_group_name, workspace_name, credential - ) - return _Connection._from_execution_connection_dict(name=name, data=connection_dict) - - @classmethod - def open_url(cls, token, url, action, host="management.azure.com", method="GET", model=None) -> Union[Any, dict]: - """ - :type token: str - :type url: str - :type action: str, for the error message format. 
- :type host: str - :type method: str - :type model: Type[msrest.serialization.Model] - """ - headers = {"Authorization": f"Bearer {token}"} - response = http_retry_wrapper(requests.request)(method, f"https://{host}{url}", headers=headers) - message_format = ( - f"Open url {{url}} failed with status code: {response.status_code}, action: {action}, reason: {{reason}}" - ) - if response.status_code == 403: - raise AccessDeniedError(operation=url, target=ErrorTarget.RUNTIME) - elif 400 <= response.status_code < 500: - raise OpenURLFailedUserError( - message_format=message_format, - url=url, - reason=response.reason, - ) - elif response.status_code != 200: - raise OpenURLFailed( - message_format=message_format, - url=url, - reason=response.reason, - ) - data = response.json() - if model: - return model.deserialize(data) - return data - - @classmethod - def validate_and_fallback_connection_type(cls, name, type_name, category, metadata): - if type_name: - return type_name - # Below category has corresponding connection type in PromptFlow, so we can fall back directly. - # Note: CustomKeys may store different connection types for now, e.g. openai, serp. 
- if category in [ - ConnectionCategory.AzureOpenAI, - ConnectionCategory.CognitiveSearch, - ConnectionCategory.Serverless, - ]: - return category - if category == ConnectionCategory.CognitiveService: - kind = get_case_insensitive_key(metadata, "Kind") - if kind == "Content Safety": - return "AzureContentSafety" - if kind == "Form Recognizer": - return "FormRecognizer" - raise UnknownConnectionType( - message_format="Connection {name} is not recognized in PromptFlow, " - "please make sure the connection is created in PromptFlow.", - category=category, - name=name, - ) - - @classmethod - def build_connection_dict_from_rest_object(cls, name, obj) -> dict: - """ - :type name: str - :type obj: azure.ai.ml._restclient.v2023_06_01_preview.models.WorkspaceConnectionPropertiesV2BasicResource - """ - # Reference 1: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/AccountRP/Contracts/WorkspaceConnection/WorkspaceConnectionDtoV2.cs&_a=blame&version=GBmaster # noqa: E501 - # Reference 2: https://msdata.visualstudio.com/Vienna/_git/vienna?path=%2Fsrc%2Fazureml-api%2Fsrc%2FDesigner%2Fsrc%2FMiddleTier%2FMiddleTier%2FServices%2FPromptFlow%2FConnectionsManagement.cs&version=GBmaster&_a=contents # noqa: E501 - # This connection type covers the generic ApiKey auth connection categories, for examples: - # AzureOpenAI: - # Category:= AzureOpenAI - # AuthType:= ApiKey (as type discriminator) - # Credentials:= {ApiKey} as - # Target:= {ApiBase} - # - # CognitiveService: - # Category:= CognitiveService - # AuthType:= ApiKey (as type discriminator) - # Credentials:= {SubscriptionKey} as - # Target:= ServiceRegion={serviceRegion} - # - # CognitiveSearch: - # Category:= CognitiveSearch - # AuthType:= ApiKey (as type discriminator) - # Credentials:= {Key} as - # Target:= {Endpoint} - # - # Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields - properties = obj.properties - type_name = get_case_insensitive_key(properties.metadata, 
f"{FLOW_META_PREFIX}connection_type") - type_name = cls.validate_and_fallback_connection_type(name, type_name, properties.category, properties.metadata) - module = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}module", "promptflow.connections") - # Note: Category is connectionType in MT, but type name should be class name, which is flowValueType in MT. - # Handle old connections here, see details: https://github.com/Azure/promptflow/tree/main/connections - type_name = f"{type_name}Connection" if not type_name.endswith("Connection") else type_name - meta = {"type": type_name, "module": module} - - def get_auth_config(props): - unsupported_message = "Unsupported connection auth type %r, supported types are 'ApiKey' and 'AAD'." - if not isinstance(props.auth_type, str): - raise UnsupportedConnectionAuthType(message=unsupported_message % props.auth_type) - if props.auth_type.lower() == ConnectionAuthType.ApiKey.lower(): - return {"api_key": props.credentials.key, "auth_mode": ConnectionAuthMode.KEY} - elif props.auth_type.lower() == ConnectionAuthType.AAD.lower(): - return {"api_key": None, "auth_mode": ConnectionAuthMode.MEID_TOKEN} - raise UnsupportedConnectionAuthType(message=unsupported_message % props.auth_type) - - if properties.category == ConnectionCategory.AzureOpenAI: - value = { - **get_auth_config(properties), - "api_base": properties.target, - "api_type": get_case_insensitive_key(properties.metadata, "ApiType"), - "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), - } - # Note: Resource id is required in some cloud scenario, which is not exposed on sdk/cli entity. 
- resource_id = get_case_insensitive_key(properties.metadata, "ResourceId") - if resource_id: - value["resource_id"] = resource_id - elif properties.category == ConnectionCategory.CognitiveSearch: - value = { - **get_auth_config(properties), - "api_base": properties.target, - "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), - } - elif properties.category == ConnectionCategory.Serverless: - value = { - **get_auth_config(properties), - "api_base": properties.target, - } - elif properties.category == ConnectionCategory.CognitiveService: - value = { - **get_auth_config(properties), - "endpoint": properties.target, - "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"), - } - elif properties.category == ConnectionCategory.CustomKeys: - # Merge secrets from credentials.keys and other string fields from metadata - value = { - **properties.credentials.keys, - **{k: v for k, v in properties.metadata.items() if not k.startswith(FLOW_META_PREFIX)}, - } - if type_name == CustomConnection.__name__: - meta["secret_keys"] = list(properties.credentials.keys.keys()) - else: - raise UnknownConnectionType( - message_format=( - "Unknown connection {name} category {category}, " - "please upgrade your promptflow sdk version and retry." - ), - category=properties.category, - name=name, - ) - # Note: Filter empty values out to ensure default values can be picked when init class object. 
- return {**meta, "value": {k: v for k, v in value.items() if v}} - - def build_connection_dict(self, name): - return self._build_connection_dict( - name, + self._provider = WorkspaceConnectionProvider( self._operation_scope.subscription_id, self._operation_scope.resource_group_name, self._operation_scope.workspace_name, self._credential, ) - @classmethod - def _convert_to_connection_dict(cls, conn_name, conn_data): - try: - rest_obj = WorkspaceConnectionPropertiesV2BasicResource.deserialize(conn_data) - conn_dict = cls.build_connection_dict_from_rest_object(conn_name, rest_obj) - return conn_dict - except Exception as e: - raise BuildConnectionError( - message_format=f"Build connection dict for connection {{name}} failed with {e}.", - name=conn_name, - ) + def get(self, name, **kwargs): + return self._provider.get(name) @classmethod - def _build_connection_dict(cls, name, subscription_id, resource_group_name, workspace_name, credential) -> dict: + def _direct_get(cls, name, subscription_id, resource_group_name, workspace_name, credential): """ - :type name: str - :type subscription_id: str - :type resource_group_name: str - :type workspace_name: str - :type credential: azure.identity.TokenCredential + This method is added for local pf_client with workspace provider to ensure we only require limited + permission(workspace/list secrets). As create azure pf_client requires workspace read permission. """ - url = GET_CONNECTION_URL.format( - sub=subscription_id, - rg=resource_group_name, - ws=workspace_name, - name=name, - ) - try: - rest_obj: WorkspaceConnectionPropertiesV2BasicResource = cls.open_url( - get_arm_token(credential=credential), - url=url, - action="listsecrets", - method="POST", - model=WorkspaceConnectionPropertiesV2BasicResource, - ) - except AccessDeniedError: - auth_error_message = ( - "Access denied to list workspace secret due to invalid authentication. 
" - "Please ensure you have gain RBAC role 'Azure Machine Learning Workspace Connection Secrets Reader' " - "for current workspace, and wait for a few minutes to make sure the new role takes effect. " - ) - raise OpenURLUserAuthenticationError(message=auth_error_message) - except ClientAuthenticationError as e: - raise UserErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e) - except UserErrorException as e: # For example: OpenURLFailedUserError - raise e - except Exception as e: - raise SystemErrorException(target=ErrorTarget.CONTROL_PLANE_SDK, message=str(e), error=e) - - try: - return cls.build_connection_dict_from_rest_object(name, rest_obj) - except Exception as e: - raise BuildConnectionError( - message_format=f"Build connection dict for connection {{name}} failed with {e}.", - name=name, - ) - - -class AccessDeniedError(UserErrorException): - """Exception raised when run info can not be found in storage""" - - def __init__(self, operation: str, target: ErrorTarget): - super().__init__(message=f"Access is denied to perform operation {operation!r}", target=target) - - -class OpenURLFailed(SystemErrorException): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) - - -class BuildConnectionError(SystemErrorException): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) - - -class UserAuthenticationError(UserErrorException): - """Exception raised when user authentication failed""" - - pass - - -class OpenURLUserAuthenticationError(UserAuthenticationError): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) - - -class OpenURLFailedUserError(UserErrorException): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) - - -class UnknownConnectionType(UserErrorException): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, 
**kwargs) - + provider = WorkspaceConnectionProvider(subscription_id, resource_group_name, workspace_name, credential) + return provider.get(name=name) -class UnsupportedConnectionAuthType(UserErrorException): - def __init__(self, **kwargs): - super().__init__(target=ErrorTarget.CONTROL_PLANE_SDK, **kwargs) + # Keep this as promptflow tools is using this method + _build_connection_dict = WorkspaceConnectionProvider._build_connection_dict diff --git a/src/promptflow/promptflow/core/_connection.py b/src/promptflow/promptflow/core/_connection.py index cc27be7aabc..5717473144b 100644 --- a/src/promptflow/promptflow/core/_connection.py +++ b/src/promptflow/promptflow/core/_connection.py @@ -13,7 +13,7 @@ from promptflow._utils.utils import in_jupyter_notebook from promptflow.contracts.types import Secret from promptflow.core._errors import RequiredEnvironmentVariablesNotSetError -from promptflow.exceptions import UserErrorException +from promptflow.exceptions import UserErrorException, ValidationException logger = LoggerFactory.get_logger(name=__name__) PROMPTFLOW_CONNECTIONS = "promptflow.connections" @@ -34,6 +34,7 @@ class _Connection: :type secrets: Dict[str, str] """ + SUPPORTED_TYPES = {} TYPE = ConnectionType._NOT_SET.value def __init__( @@ -81,6 +82,33 @@ def __getitem__(self, item): # Cant't raise UserErrorException due to the code exit(1) of promptflow._cli._utils.py line 368. 
raise KeyError(f"Key {item!r} not found in connection {self.name!r}.") + def _to_execution_connection_dict(self) -> dict: + value = {**self.configs, **self.secrets} + secret_keys = list(self.secrets.keys()) + return { + "type": self.class_name, # Required class name for connection in executor + "module": self.module, + "value": {k: v for k, v in value.items() if v is not None}, # Filter None value out + "secret_keys": secret_keys, + } + + @classmethod + def _from_execution_connection_dict(cls, name, data) -> "_Connection": + type_str = data.get("type")[: -len("Connection")] + type_cls = cls.SUPPORTED_TYPES.get(type_str) + if type_cls is None: + raise ValidationException( + f"Connection type {type_str!r} is not supported. " + f"Supported types are: {list(cls.SUPPORTED_TYPES.keys())}" + ) + value_dict = data.get("value", {}) + # Use class name instead of class here, because the class may be _sdk entity. + if type_cls.__name__ == "CustomConnection": + secrets = {k: v for k, v in value_dict.items() if k in data.get("secret_keys", [])} + configs = {k: v for k, v in value_dict.items() if k not in secrets} + return type_cls(name=name, configs=configs, secrets=secrets) + return type_cls(name=name, **value_dict) + class _StrongTypeConnection(_Connection): @property @@ -701,3 +729,10 @@ def _convert_to_custom_strong_type(self, module=None, to_class=None) -> CustomSt connection_instance = custom_defined_connection_class(configs=self.configs, secrets=self.secrets) return connection_instance + + +_Connection.SUPPORTED_TYPES = { + v.TYPE: v + for v in globals().values() + if isinstance(v, type) and issubclass(v, _Connection) and not v.__name__.startswith("_") +} diff --git a/src/promptflow/promptflow/core/_connection_provider/__init__.py b/src/promptflow/promptflow/core/_connection_provider/__init__.py new file mode 100644 index 00000000000..29a4fcd3278 --- /dev/null +++ b/src/promptflow/promptflow/core/_connection_provider/__init__.py @@ -0,0 +1,5 @@ +# 
--------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/src/promptflow/promptflow/core/_connection_provider/_connection_provider.py b/src/promptflow/promptflow/core/_connection_provider/_connection_provider.py new file mode 100644 index 00000000000..22c39157565 --- /dev/null +++ b/src/promptflow/promptflow/core/_connection_provider/_connection_provider.py @@ -0,0 +1,17 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from abc import ABC, abstractmethod +from typing import Any + + +class ConnectionProvider(ABC): + @abstractmethod + def get(self, name: str) -> Any: + """Get connection by name.""" + raise NotImplementedError + + @classmethod + def _init_from_env(cls): + """Initialize the connection provider from environment variables.""" + pass diff --git a/src/promptflow/promptflow/core/_connection_provider/_dict_connection_provider.py b/src/promptflow/promptflow/core/_connection_provider/_dict_connection_provider.py new file mode 100644 index 00000000000..2f344f03fa8 --- /dev/null +++ b/src/promptflow/promptflow/core/_connection_provider/_dict_connection_provider.py @@ -0,0 +1,82 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from dataclasses import fields, is_dataclass +from typing import Any, Dict + +from promptflow._constants import CONNECTION_NAME_PROPERTY, CONNECTION_SECRET_KEYS, CustomStrongTypeConnectionConfigs +from promptflow._utils.utils import try_import +from promptflow.contracts.types import Secret + +from ._connection_provider import ConnectionProvider + + +class DictConnectionProvider(ConnectionProvider): + """Connection provider based on dict, core scenario: cloud submission.""" + + def __init__(self, _dict: Dict[str, dict]): + self._connections_dict = _dict or {} + self._connections = self._build_connections(self._connections_dict) + + @classmethod + def _build_connections(cls, _dict: Dict[str, dict]): + """Build connection dict.""" + from promptflow._core.tools_manager import connections as cls_mapping + + cls.import_requisites(_dict) + connections = {} # key to connection object + for key, connection_dict in _dict.items(): + typ = connection_dict.get("type") + if typ not in cls_mapping: + supported = [key for key in cls_mapping.keys() if not key.startswith("_")] + raise ValueError(f"Unknown connection {key!r} type {typ!r}, supported are {supported}.") + value = connection_dict.get("value", {}) + connection_class = cls_mapping[typ] + + from promptflow.connections import CustomConnection + + if connection_class is CustomConnection: + # Note: CustomConnection definition can not be got, secret keys will be provided in connection dict. 
+ secret_keys = connection_dict.get("secret_keys", []) + secrets = {k: v for k, v in value.items() if k in secret_keys} + configs = {k: v for k, v in value.items() if k not in secrets} + connection_value = connection_class(configs=configs, secrets=secrets, name=key) + if CustomStrongTypeConnectionConfigs.PROMPTFLOW_TYPE_KEY in configs: + connection_value.custom_type = configs[CustomStrongTypeConnectionConfigs.PROMPTFLOW_TYPE_KEY] + else: + """ + Note: Ignore non exists keys of connection class, + because there are some keys just used by UX like resource id, while not used by backend. + """ + if is_dataclass(connection_class): + # Do not delete this branch, as promptflow_vectordb.connections is dataclass type. + cls_fields = {f.name: f for f in fields(connection_class)} + connection_value = connection_class(**{k: v for k, v in value.items() if k in cls_fields}) + secret_keys = [f.name for f in cls_fields.values() if f.type == Secret] + else: + connection_value = connection_class(**{k: v for k, v in value.items()}) + if hasattr(connection_value, "name"): + connection_value.name = key + secrets = getattr(connection_value, "secrets", {}) + secret_keys = list(secrets.keys()) if isinstance(secrets, dict) else [] + # Set secret keys for log scrubbing + setattr(connection_value, CONNECTION_SECRET_KEYS, secret_keys) + # Use this hack to make sure serialization works + setattr(connection_value, CONNECTION_NAME_PROPERTY, key) + connections[key] = connection_value + return connections + + @classmethod + def import_requisites(cls, _dict: Dict[str, dict]): + """Import connection required modules.""" + modules = set() + for key, connection_dict in _dict.items(): + module = connection_dict.get("module") + if module: + modules.add(module) + for module in modules: + # Suppress import error, as we have legacy module promptflow.tools.connections. 
+ try_import(module, f"Import connection module {module!r} failed.", raise_error=False) + + def get(self, name: str) -> Any: + return self._connections.get(name) diff --git a/src/promptflow/promptflow/core/_connection_provider/_local_connection_provider.py b/src/promptflow/promptflow/core/_connection_provider/_local_connection_provider.py new file mode 100644 index 00000000000..510ae1da78c --- /dev/null +++ b/src/promptflow/promptflow/core/_connection_provider/_local_connection_provider.py @@ -0,0 +1,14 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from ._connection_provider import ConnectionProvider + + +class LocalConnectionProvider(ConnectionProvider): + """Local connection provider.""" + + def __init__(self): + pass + + def get(self, name: str): + pass diff --git a/src/promptflow/promptflow/azure/_models/__init__.py b/src/promptflow/promptflow/core/_connection_provider/_models/__init__.py similarity index 100% rename from src/promptflow/promptflow/azure/_models/__init__.py rename to src/promptflow/promptflow/core/_connection_provider/_models/__init__.py diff --git a/src/promptflow/promptflow/azure/_models/_models.py b/src/promptflow/promptflow/core/_connection_provider/_models/_models.py similarity index 68% rename from src/promptflow/promptflow/azure/_models/_models.py rename to src/promptflow/promptflow/core/_connection_provider/_models/_models.py index 971c36d6144..d0376a0ed8f 100644 --- a/src/promptflow/promptflow/azure/_models/_models.py +++ b/src/promptflow/promptflow/core/_connection_provider/_models/_models.py @@ -59,42 +59,41 @@ class WorkspaceConnectionPropertiesV2(msrest.serialization.Model): """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": 
True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, } _subtype_map = { - 'auth_type': {'AAD': 'AADAuthTypeWorkspaceConnectionProperties', - 'AccessKey': 'AccessKeyAuthTypeWorkspaceConnectionProperties', - 'AccountKey': 'AccountKeyAuthTypeWorkspaceConnectionProperties', - 'ApiKey': 'ApiKeyAuthWorkspaceConnectionProperties', - 'CustomKeys': 'CustomKeysWorkspaceConnectionProperties', - 'ManagedIdentity': 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', - 'None': 'NoneAuthTypeWorkspaceConnectionProperties', - 'OAuth2': 'OAuth2AuthTypeWorkspaceConnectionProperties', - 'PAT': 'PATAuthTypeWorkspaceConnectionProperties', - 'SAS': 'SASAuthTypeWorkspaceConnectionProperties', - 'ServicePrincipal': 'ServicePrincipalAuthTypeWorkspaceConnectionProperties', - 'UsernamePassword': 'UsernamePasswordAuthTypeWorkspaceConnectionProperties'} - } - - def __init__( - self, - **kwargs - ): + "auth_type": { + "AAD": "AADAuthTypeWorkspaceConnectionProperties", + 
"AccessKey": "AccessKeyAuthTypeWorkspaceConnectionProperties", + "AccountKey": "AccountKeyAuthTypeWorkspaceConnectionProperties", + "ApiKey": "ApiKeyAuthWorkspaceConnectionProperties", + "CustomKeys": "CustomKeysWorkspaceConnectionProperties", + "ManagedIdentity": "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "None": "NoneAuthTypeWorkspaceConnectionProperties", + "OAuth2": "OAuth2AuthTypeWorkspaceConnectionProperties", + "PAT": "PATAuthTypeWorkspaceConnectionProperties", + "SAS": "SASAuthTypeWorkspaceConnectionProperties", + "ServicePrincipal": "ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "UsernamePassword": "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + } + } + + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -127,14 +126,14 @@ def __init__( """ super(WorkspaceConnectionPropertiesV2, self).__init__(**kwargs) self.auth_type = None # type: Optional[str] - self.category = kwargs.get('category', None) + self.category = kwargs.get("category", None) self.created_by_workspace_arm_id = None - self.expiry_time = kwargs.get('expiry_time', None) + self.expiry_time = kwargs.get("expiry_time", None) self.group = None - self.is_shared_to_all = kwargs.get('is_shared_to_all', None) - self.metadata = kwargs.get('metadata', None) - self.shared_user_list = kwargs.get('shared_user_list', None) - self.target = kwargs.get('target', None) + self.is_shared_to_all = kwargs.get("is_shared_to_all", None) + self.metadata = kwargs.get("metadata", None) + self.shared_user_list = kwargs.get("shared_user_list", None) + self.target = kwargs.get("target", None) class AADAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -184,27 +183,24 @@ class AADAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, - 
'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. 
Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -236,7 +232,7 @@ def __init__( :paramtype target: str """ super(AADAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'AAD' # type: str + self.auth_type = "AAD" # type: str class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -288,28 +284,25 @@ class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionProperti """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionAccessKey'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionAccessKey"}, 
} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -343,8 +336,8 @@ def __init__( :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey """ super(AccessKeyAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'AccessKey' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "AccessKey" # type: str + self.credentials = kwargs.get("credentials", None) class AccountApiKeys(msrest.serialization.Model): @@ -357,14 +350,11 @@ class AccountApiKeys(msrest.serialization.Model): """ _attribute_map = { - 'key1': {'key': 'key1', 'type': 'str'}, - 'key2': {'key': 'key2', 'type': 'str'}, + "key1": {"key": "key1", "type": "str"}, + "key2": {"key": "key2", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key1: :paramtype key1: str @@ -372,8 +362,8 @@ def __init__( :paramtype key2: str """ super(AccountApiKeys, self).__init__(**kwargs) - self.key1 = kwargs.get('key1', None) - self.key2 = kwargs.get('key2', None) + self.key1 = kwargs.get("key1", None) + self.key2 = kwargs.get("key2", None) class AccountKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -426,28 +416,25 @@ class AccountKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropert """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, 
- 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionSharedAccessSignature'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionSharedAccessSignature"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. 
Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -482,8 +469,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature """ super(AccountKeyAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'AccountKey' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "AccountKey" # type: str + self.credentials = kwargs.get("credentials", None) class DatastoreCredentials(msrest.serialization.Model): @@ -501,28 +488,27 @@ class DatastoreCredentials(msrest.serialization.Model): """ _validation = { - 'credentials_type': {'required': True}, + "credentials_type": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, } _subtype_map = { - 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', - 'Certificate': 'CertificateDatastoreCredentials', - 'KerberosKeytab': 'KerberosKeytabCredentials', - 'KerberosPassword': 'KerberosPasswordCredentials', 'None': 'NoneDatastoreCredentials', - 'Sas': 'SasDatastoreCredentials', - 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials'} - } - - def __init__( - self, - **kwargs - ): - """ - """ + "credentials_type": { + "AccountKey": "AccountKeyDatastoreCredentials", + "Certificate": "CertificateDatastoreCredentials", + "KerberosKeytab": "KerberosKeytabCredentials", + "KerberosPassword": "KerberosPasswordCredentials", + "None": "NoneDatastoreCredentials", + "Sas": "SasDatastoreCredentials", + "ServicePrincipal": "ServicePrincipalDatastoreCredentials", + } + } + + def __init__(self, **kwargs): + """ """ super(DatastoreCredentials, self).__init__(**kwargs) self.credentials_type = None # type: Optional[str] @@ -541,26 +527,23 @@ class AccountKeyDatastoreCredentials(DatastoreCredentials): """ _validation = { - 'credentials_type': 
{'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "AccountKeyDatastoreSecrets"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword secrets: Required. [Required] Storage account secrets. :paramtype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets """ super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'AccountKey' # type: str - self.secrets = kwargs['secrets'] + self.credentials_type = "AccountKey" # type: str + self.secrets = kwargs["secrets"] class DatastoreSecrets(msrest.serialization.Model): @@ -578,25 +561,26 @@ class DatastoreSecrets(msrest.serialization.Model): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, } _subtype_map = { - 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', - 'KerberosKeytab': 'KerberosKeytabSecrets', 'KerberosPassword': 'KerberosPasswordSecrets', - 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets'} - } - - def __init__( - self, - **kwargs - ): - """ - """ + "secrets_type": { + "AccountKey": "AccountKeyDatastoreSecrets", + "Certificate": "CertificateDatastoreSecrets", + "KerberosKeytab": "KerberosKeytabSecrets", + "KerberosPassword": "KerberosPasswordSecrets", + "Sas": "SasDatastoreSecrets", + "ServicePrincipal": "ServicePrincipalDatastoreSecrets", + } + } + + def __init__(self, **kwargs): + """ """ super(DatastoreSecrets, 
self).__init__(**kwargs) self.secrets_type = None # type: Optional[str] @@ -615,25 +599,22 @@ class AccountKeyDatastoreSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "key": {"key": "key", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key: Storage account key. :paramtype key: str """ super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'AccountKey' # type: str - self.key = kwargs.get('key', None) + self.secrets_type = "AccountKey" # type: str + self.key = kwargs.get("key", None) class DeploymentModel(msrest.serialization.Model): @@ -657,21 +638,18 @@ class DeploymentModel(msrest.serialization.Model): """ _validation = { - 'call_rate_limit': {'readonly': True}, + "call_rate_limit": {"readonly": True}, } _attribute_map = { - 'format': {'key': 'format', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, - 'call_rate_limit': {'key': 'callRateLimit', 'type': 'CallRateLimit'}, + "format": {"key": "format", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "source": {"key": "source", "type": "str"}, + "call_rate_limit": {"key": "callRateLimit", "type": "CallRateLimit"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword format: Deployment model format. 
:paramtype format: str @@ -686,10 +664,10 @@ def __init__( :paramtype source: str """ super(DeploymentModel, self).__init__(**kwargs) - self.format = kwargs.get('format', None) - self.name = kwargs.get('name', None) - self.version = kwargs.get('version', None) - self.source = kwargs.get('source', None) + self.format = kwargs.get("format", None) + self.name = kwargs.get("name", None) + self.version = kwargs.get("version", None) + self.source = kwargs.get("source", None) self.call_rate_limit = None @@ -734,31 +712,28 @@ class AccountModel(DeploymentModel): """ _validation = { - 'call_rate_limit': {'readonly': True}, - 'system_data': {'readonly': True}, + "call_rate_limit": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'format': {'key': 'format', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, - 'call_rate_limit': {'key': 'callRateLimit', 'type': 'CallRateLimit'}, - 'base_model': {'key': 'baseModel', 'type': 'DeploymentModel'}, - 'is_default_version': {'key': 'isDefaultVersion', 'type': 'bool'}, - 'skus': {'key': 'skus', 'type': '[ModelSku]'}, - 'max_capacity': {'key': 'maxCapacity', 'type': 'int'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'finetune_capabilities': {'key': 'finetuneCapabilities', 'type': '{str}'}, - 'deprecation': {'key': 'deprecation', 'type': 'ModelDeprecationInfo'}, - 'lifecycle_status': {'key': 'lifecycleStatus', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + "format": {"key": "format", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "version": {"key": "version", "type": "str"}, + "source": {"key": "source", "type": "str"}, + "call_rate_limit": {"key": "callRateLimit", "type": "CallRateLimit"}, + "base_model": {"key": "baseModel", "type": "DeploymentModel"}, + "is_default_version": {"key": "isDefaultVersion", "type": "bool"}, + "skus": {"key": 
"skus", "type": "[ModelSku]"}, + "max_capacity": {"key": "maxCapacity", "type": "int"}, + "capabilities": {"key": "capabilities", "type": "{str}"}, + "finetune_capabilities": {"key": "finetuneCapabilities", "type": "{str}"}, + "deprecation": {"key": "deprecation", "type": "ModelDeprecationInfo"}, + "lifecycle_status": {"key": "lifecycleStatus", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword format: Deployment model format. :paramtype format: str @@ -791,14 +766,14 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ModelLifecycleStatus """ super(AccountModel, self).__init__(**kwargs) - self.base_model = kwargs.get('base_model', None) - self.is_default_version = kwargs.get('is_default_version', None) - self.skus = kwargs.get('skus', None) - self.max_capacity = kwargs.get('max_capacity', None) - self.capabilities = kwargs.get('capabilities', None) - self.finetune_capabilities = kwargs.get('finetune_capabilities', None) - self.deprecation = kwargs.get('deprecation', None) - self.lifecycle_status = kwargs.get('lifecycle_status', None) + self.base_model = kwargs.get("base_model", None) + self.is_default_version = kwargs.get("is_default_version", None) + self.skus = kwargs.get("skus", None) + self.max_capacity = kwargs.get("max_capacity", None) + self.capabilities = kwargs.get("capabilities", None) + self.finetune_capabilities = kwargs.get("finetune_capabilities", None) + self.deprecation = kwargs.get("deprecation", None) + self.lifecycle_status = kwargs.get("lifecycle_status", None) self.system_data = None @@ -816,14 +791,11 @@ class AcrDetails(msrest.serialization.Model): """ _attribute_map = { - 'system_created_acr_account': {'key': 'systemCreatedAcrAccount', 'type': 'SystemCreatedAcrAccount'}, - 'user_created_acr_account': {'key': 'userCreatedAcrAccount', 'type': 'UserCreatedAcrAccount'}, + "system_created_acr_account": {"key": 
"systemCreatedAcrAccount", "type": "SystemCreatedAcrAccount"}, + "user_created_acr_account": {"key": "userCreatedAcrAccount", "type": "UserCreatedAcrAccount"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword system_created_acr_account: Details of system created ACR account to be used for the Registry. @@ -835,8 +807,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount """ super(AcrDetails, self).__init__(**kwargs) - self.system_created_acr_account = kwargs.get('system_created_acr_account', None) - self.user_created_acr_account = kwargs.get('user_created_acr_account', None) + self.system_created_acr_account = kwargs.get("system_created_acr_account", None) + self.user_created_acr_account = kwargs.get("user_created_acr_account", None) class ActualCapacityInfo(msrest.serialization.Model): @@ -853,15 +825,12 @@ class ActualCapacityInfo(msrest.serialization.Model): """ _attribute_map = { - 'allocated': {'key': 'allocated', 'type': 'int'}, - 'assignment_failed': {'key': 'assignmentFailed', 'type': 'int'}, - 'assignment_success': {'key': 'assignmentSuccess', 'type': 'int'}, + "allocated": {"key": "allocated", "type": "int"}, + "assignment_failed": {"key": "assignmentFailed", "type": "int"}, + "assignment_success": {"key": "assignmentSuccess", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword allocated: Gets or sets the total number of instances for the group. 
:paramtype allocated: int @@ -873,9 +842,9 @@ def __init__( :paramtype assignment_success: int """ super(ActualCapacityInfo, self).__init__(**kwargs) - self.allocated = kwargs.get('allocated', 0) - self.assignment_failed = kwargs.get('assignment_failed', 0) - self.assignment_success = kwargs.get('assignment_success', 0) + self.allocated = kwargs.get("allocated", 0) + self.assignment_failed = kwargs.get("assignment_failed", 0) + self.assignment_success = kwargs.get("assignment_success", 0) class AKSSchema(msrest.serialization.Model): @@ -886,19 +855,16 @@ class AKSSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: AKS properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties """ super(AKSSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class Compute(msrest.serialization.Model): @@ -941,38 +907,43 @@ class Compute(msrest.serialization.Model): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 
'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', - 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', - 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'Kubernetes': 'Kubernetes', - 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'} - } - - def __init__( - self, - **kwargs - ): + "compute_type": { + "AKS": "AKS", + "AmlCompute": "AmlCompute", + "ComputeInstance": "ComputeInstance", + "DataFactory": "DataFactory", + "DataLakeAnalytics": "DataLakeAnalytics", + "Databricks": "Databricks", + "HDInsight": "HDInsight", + "Kubernetes": "Kubernetes", + "SynapseSpark": "SynapseSpark", + "VirtualMachine": "VirtualMachine", + } + } + + def __init__(self, **kwargs): """ :keyword compute_location: Location for the underlying compute. 
:paramtype compute_location: str @@ -986,15 +957,15 @@ def __init__( """ super(Compute, self).__init__(**kwargs) self.compute_type = None # type: Optional[str] - self.compute_location = kwargs.get('compute_location', None) + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class AKS(Compute, AKSSchema): @@ -1036,32 +1007,29 @@ class AKS(Compute, AKSSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 
'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: AKS properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties @@ -1076,17 +1044,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(AKS, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'AKS' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "AKS" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class AksComputeSecretsProperties(msrest.serialization.Model): @@ 
-1103,15 +1071,12 @@ class AksComputeSecretsProperties(msrest.serialization.Model): """ _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. @@ -1123,9 +1088,9 @@ def __init__( :paramtype image_pull_secret_name: str """ super(AksComputeSecretsProperties, self).__init__(**kwargs) - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) + self.user_kube_config = kwargs.get("user_kube_config", None) + self.admin_kube_config = kwargs.get("admin_kube_config", None) + self.image_pull_secret_name = kwargs.get("image_pull_secret_name", None) class ComputeSecrets(msrest.serialization.Model): @@ -1143,24 +1108,23 @@ class ComputeSecrets(msrest.serialization.Model): """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, } _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', - 'VirtualMachine': 'VirtualMachineSecrets'} + "compute_type": { + "AKS": "AksComputeSecrets", + "Databricks": "DatabricksComputeSecrets", + "VirtualMachine": "VirtualMachineSecrets", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ 
super(ComputeSecrets, self).__init__(**kwargs) self.compute_type = None # type: Optional[str] @@ -1185,20 +1149,17 @@ class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. 
@@ -1210,10 +1171,10 @@ def __init__( :paramtype image_pull_secret_name: str """ super(AksComputeSecrets, self).__init__(**kwargs) - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) - self.compute_type = 'AKS' # type: str + self.user_kube_config = kwargs.get("user_kube_config", None) + self.admin_kube_config = kwargs.get("admin_kube_config", None) + self.image_pull_secret_name = kwargs.get("image_pull_secret_name", None) + self.compute_type = "AKS" # type: str class AksNetworkingConfiguration(msrest.serialization.Model): @@ -1233,23 +1194,21 @@ class AksNetworkingConfiguration(msrest.serialization.Model): """ _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': { - 'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, + "service_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, + "dns_service_ip": { + "pattern": r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" + }, + "docker_bridge_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, } _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, + "subnet_id": {"key": "subnetId", "type": "str"}, + "service_cidr": {"key": "serviceCidr", "type": "str"}, + "dns_service_ip": {"key": "dnsServiceIP", "type": "str"}, + "docker_bridge_cidr": {"key": "dockerBridgeCidr", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, 
**kwargs): """ :keyword subnet_id: Virtual network subnet resource ID the compute nodes belong to. :paramtype subnet_id: str @@ -1264,10 +1223,10 @@ def __init__( :paramtype docker_bridge_cidr: str """ super(AksNetworkingConfiguration, self).__init__(**kwargs) - self.subnet_id = kwargs.get('subnet_id', None) - self.service_cidr = kwargs.get('service_cidr', None) - self.dns_service_ip = kwargs.get('dns_service_ip', None) - self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None) + self.subnet_id = kwargs.get("subnet_id", None) + self.service_cidr = kwargs.get("service_cidr", None) + self.dns_service_ip = kwargs.get("dns_service_ip", None) + self.docker_bridge_cidr = kwargs.get("docker_bridge_cidr", None) class AKSSchemaProperties(msrest.serialization.Model): @@ -1299,26 +1258,23 @@ class AKSSchemaProperties(msrest.serialization.Model): """ _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 0}, + "system_services": {"readonly": True}, + "agent_count": {"minimum": 0}, } _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, - 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, - 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, - 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, + "cluster_fqdn": {"key": "clusterFqdn", "type": "str"}, + "system_services": {"key": "systemServices", "type": "[SystemService]"}, + "agent_count": {"key": "agentCount", "type": "int"}, + "agent_vm_size": {"key": "agentVmSize", "type": "str"}, + "cluster_purpose": {"key": "clusterPurpose", "type": "str"}, + 
"ssl_configuration": {"key": "sslConfiguration", "type": "SslConfiguration"}, + "aks_networking_configuration": {"key": "aksNetworkingConfiguration", "type": "AksNetworkingConfiguration"}, + "load_balancer_type": {"key": "loadBalancerType", "type": "str"}, + "load_balancer_subnet": {"key": "loadBalancerSubnet", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cluster_fqdn: Cluster full qualified domain name. :paramtype cluster_fqdn: str @@ -1342,15 +1298,15 @@ def __init__( :paramtype load_balancer_subnet: str """ super(AKSSchemaProperties, self).__init__(**kwargs) - self.cluster_fqdn = kwargs.get('cluster_fqdn', None) + self.cluster_fqdn = kwargs.get("cluster_fqdn", None) self.system_services = None - self.agent_count = kwargs.get('agent_count', None) - self.agent_vm_size = kwargs.get('agent_vm_size', None) - self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd") - self.ssl_configuration = kwargs.get('ssl_configuration', None) - self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) - self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp") - self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None) + self.agent_count = kwargs.get("agent_count", None) + self.agent_vm_size = kwargs.get("agent_vm_size", None) + self.cluster_purpose = kwargs.get("cluster_purpose", "FastProd") + self.ssl_configuration = kwargs.get("ssl_configuration", None) + self.aks_networking_configuration = kwargs.get("aks_networking_configuration", None) + self.load_balancer_type = kwargs.get("load_balancer_type", "PublicIp") + self.load_balancer_subnet = kwargs.get("load_balancer_subnet", None) class MonitoringFeatureFilterBase(msrest.serialization.Model): @@ -1369,24 +1325,23 @@ class MonitoringFeatureFilterBase(msrest.serialization.Model): """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 
'filterType', 'type': 'str'}, + "filter_type": {"key": "filterType", "type": "str"}, } _subtype_map = { - 'filter_type': {'AllFeatures': 'AllFeatures', 'FeatureSubset': 'FeatureSubset', - 'TopNByAttribution': 'TopNFeaturesByAttribution'} + "filter_type": { + "AllFeatures": "AllFeatures", + "FeatureSubset": "FeatureSubset", + "TopNByAttribution": "TopNFeaturesByAttribution", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MonitoringFeatureFilterBase, self).__init__(**kwargs) self.filter_type = None # type: Optional[str] @@ -1404,21 +1359,17 @@ class AllFeatures(MonitoringFeatureFilterBase): """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, + "filter_type": {"key": "filterType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AllFeatures, self).__init__(**kwargs) - self.filter_type = 'AllFeatures' # type: str + self.filter_type = "AllFeatures" # type: str class Nodes(msrest.serialization.Model): @@ -1435,23 +1386,17 @@ class Nodes(msrest.serialization.Model): """ _validation = { - 'nodes_value_type': {'required': True}, + "nodes_value_type": {"required": True}, } _attribute_map = { - 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, } - _subtype_map = { - 'nodes_value_type': {'All': 'AllNodes'} - } + _subtype_map = {"nodes_value_type": {"All": "AllNodes"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(Nodes, self).__init__(**kwargs) self.nodes_value_type = None # type: Optional[str] @@ -1467,21 +1412,17 @@ class AllNodes(Nodes): """ _validation = { - 'nodes_value_type': {'required': True}, + "nodes_value_type": {"required": True}, } _attribute_map = { - 'nodes_value_type': {'key': 
'nodesValueType', 'type': 'str'}, + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AllNodes, self).__init__(**kwargs) - self.nodes_value_type = 'All' # type: str + self.nodes_value_type = "All" # type: str class AmlComputeSchema(msrest.serialization.Model): @@ -1492,19 +1433,16 @@ class AmlComputeSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + "properties": {"key": "properties", "type": "AmlComputeProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of AmlCompute. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties """ super(AmlComputeSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class AmlCompute(Compute, AmlComputeSchema): @@ -1546,32 +1484,29 @@ class AmlCompute(Compute, AmlComputeSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 
'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "AmlComputeProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of AmlCompute. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties @@ -1586,17 +1521,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(AmlCompute, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'AmlCompute' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "AmlCompute" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class AmlComputeNodeInformation(msrest.serialization.Model): @@ -1621,29 +1556,25 @@ class AmlComputeNodeInformation(msrest.serialization.Model): """ _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, + "node_id": {"readonly": True}, + "private_ip_address": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "port": {"readonly": True}, + "node_state": {"readonly": True}, + "run_id": {"readonly": True}, } _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, + "node_id": {"key": "nodeId", 
"type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "port": {"key": "port", "type": "int"}, + "node_state": {"key": "nodeState", "type": "str"}, + "run_id": {"key": "runId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AmlComputeNodeInformation, self).__init__(**kwargs) self.node_id = None self.private_ip_address = None @@ -1665,21 +1596,17 @@ class AmlComputeNodesInformation(msrest.serialization.Model): """ _validation = { - 'nodes': {'readonly': True}, - 'next_link': {'readonly': True}, + "nodes": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "nodes": {"key": "nodes", "type": "[AmlComputeNodeInformation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AmlComputeNodesInformation, self).__init__(**kwargs) self.nodes = None self.next_link = None @@ -1750,38 +1677,35 @@ class AmlComputeProperties(msrest.serialization.Model): """ _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, + "allocation_state": {"readonly": True}, + "allocation_state_transition_time": {"readonly": True}, + "errors": {"readonly": True}, + "current_node_count": {"readonly": True}, + "target_node_count": {"readonly": True}, + "node_state_counts": {"readonly": True}, } _attribute_map = { - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 
'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, - 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'property_bag': {'key': 'propertyBag', 'type': 'object'}, + "os_type": {"key": "osType", "type": "str"}, + "vm_size": {"key": "vmSize", "type": "str"}, + "vm_priority": {"key": "vmPriority", "type": "str"}, + "virtual_machine_image": {"key": "virtualMachineImage", "type": "VirtualMachineImage"}, + "isolated_network": {"key": "isolatedNetwork", "type": "bool"}, + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, + "user_account_credentials": {"key": "userAccountCredentials", "type": "UserAccountCredentials"}, + "subnet": {"key": "subnet", "type": "ResourceId"}, + "remote_login_port_public_access": {"key": "remoteLoginPortPublicAccess", "type": "str"}, + "allocation_state": {"key": "allocationState", "type": "str"}, + "allocation_state_transition_time": {"key": "allocationStateTransitionTime", "type": "iso-8601"}, + "errors": {"key": "errors", "type": "[ErrorResponse]"}, + "current_node_count": {"key": "currentNodeCount", "type": "int"}, + "target_node_count": {"key": "targetNodeCount", 
"type": "int"}, + "node_state_counts": {"key": "nodeStateCounts", "type": "NodeStateCounts"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "property_bag": {"key": "propertyBag", "type": "object"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value: "Linux". @@ -1822,23 +1746,23 @@ def __init__( :paramtype property_bag: any """ super(AmlComputeProperties, self).__init__(**kwargs) - self.os_type = kwargs.get('os_type', "Linux") - self.vm_size = kwargs.get('vm_size', None) - self.vm_priority = kwargs.get('vm_priority', None) - self.virtual_machine_image = kwargs.get('virtual_machine_image', None) - self.isolated_network = kwargs.get('isolated_network', None) - self.scale_settings = kwargs.get('scale_settings', None) - self.user_account_credentials = kwargs.get('user_account_credentials', None) - self.subnet = kwargs.get('subnet', None) - self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") + self.os_type = kwargs.get("os_type", "Linux") + self.vm_size = kwargs.get("vm_size", None) + self.vm_priority = kwargs.get("vm_priority", None) + self.virtual_machine_image = kwargs.get("virtual_machine_image", None) + self.isolated_network = kwargs.get("isolated_network", None) + self.scale_settings = kwargs.get("scale_settings", None) + self.user_account_credentials = kwargs.get("user_account_credentials", None) + self.subnet = kwargs.get("subnet", None) + self.remote_login_port_public_access = kwargs.get("remote_login_port_public_access", "NotSpecified") self.allocation_state = None self.allocation_state_transition_time = None self.errors = None self.current_node_count = None self.target_node_count = None self.node_state_counts = None - self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True) - self.property_bag = kwargs.get('property_bag', None) + 
self.enable_node_public_ip = kwargs.get("enable_node_public_ip", True) + self.property_bag = kwargs.get("property_bag", None) class IdentityConfiguration(msrest.serialization.Model): @@ -1856,23 +1780,19 @@ class IdentityConfiguration(msrest.serialization.Model): """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } _subtype_map = { - 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity', 'UserIdentity': 'UserIdentity'} + "identity_type": {"AMLToken": "AmlToken", "Managed": "ManagedIdentity", "UserIdentity": "UserIdentity"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(IdentityConfiguration, self).__init__(**kwargs) self.identity_type = None # type: Optional[str] @@ -1889,21 +1809,17 @@ class AmlToken(IdentityConfiguration): """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AmlToken, self).__init__(**kwargs) - self.identity_type = 'AMLToken' # type: str + self.identity_type = "AMLToken" # type: str class MonitorComputeIdentityBase(msrest.serialization.Model): @@ -1921,23 +1837,19 @@ class MonitorComputeIdentityBase(msrest.serialization.Model): """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, } _subtype_map = { - 'compute_identity_type': {'AmlToken': 'AmlTokenComputeIdentity', 'ManagedIdentity': 
'ManagedComputeIdentity'} + "compute_identity_type": {"AmlToken": "AmlTokenComputeIdentity", "ManagedIdentity": "ManagedComputeIdentity"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MonitorComputeIdentityBase, self).__init__(**kwargs) self.compute_identity_type = None # type: Optional[str] @@ -1954,21 +1866,17 @@ class AmlTokenComputeIdentity(MonitorComputeIdentityBase): """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AmlTokenComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'AmlToken' # type: str + self.compute_identity_type = "AmlToken" # type: str class AmlUserFeature(msrest.serialization.Model): @@ -1983,15 +1891,12 @@ class AmlUserFeature(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "description": {"key": "description", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: Specifies the feature ID. 
:paramtype id: str @@ -2001,9 +1906,9 @@ def __init__( :paramtype description: str """ super(AmlUserFeature, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.display_name = kwargs.get('display_name', None) - self.description = kwargs.get('description', None) + self.id = kwargs.get("id", None) + self.display_name = kwargs.get("display_name", None) + self.description = kwargs.get("description", None) class DataReferenceCredential(msrest.serialization.Model): @@ -2022,24 +1927,24 @@ class DataReferenceCredential(msrest.serialization.Model): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, } _subtype_map = { - 'credential_type': {'DockerCredentials': 'DockerCredential', 'ManagedIdentity': 'ManagedIdentityCredential', - 'NoCredentials': 'AnonymousAccessCredential', 'SAS': 'SASCredential'} + "credential_type": { + "DockerCredentials": "DockerCredential", + "ManagedIdentity": "ManagedIdentityCredential", + "NoCredentials": "AnonymousAccessCredential", + "SAS": "SASCredential", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(DataReferenceCredential, self).__init__(**kwargs) self.credential_type = None # type: Optional[str] @@ -2057,113 +1962,106 @@ class AnonymousAccessCredential(DataReferenceCredential): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AnonymousAccessCredential, self).__init__(**kwargs) - self.credential_type = 'NoCredentials' # type: str + self.credential_type = "NoCredentials" # type: 
str class ApiKeyAuthWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """This connection type covers the generic ApiKey auth connection categories, for examples: -AzureOpenAI: - Category:= AzureOpenAI - AuthType:= ApiKey (as type discriminator) - Credentials:= {ApiKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {ApiBase} - -CognitiveService: - Category:= CognitiveService - AuthType:= ApiKey (as type discriminator) - Credentials:= {SubscriptionKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= ServiceRegion={serviceRegion} - -CognitiveSearch: - Category:= CognitiveSearch - AuthType:= ApiKey (as type discriminator) - Credentials:= {Key} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {Endpoint} - -Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "AccountKey", "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys", "OAuth2", "AAD". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. 
Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", - "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", - "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", - "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", - "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", - "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", - "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", - "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", - "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", - "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", - "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", - "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", - "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", - "WebTable", "Xero", "Zoho", "GenericContainerRegistry". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar created_by_workspace_arm_id: - :vartype created_by_workspace_arm_id: str - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar group: Group based on connection category. Possible values include: "Azure", "AzureAI", - "Database", "NoSQL", "File", "GenericProtocol", "ServicesAndApps". 
- :vartype group: str or ~azure.mgmt.machinelearningservices.models.ConnectionGroup - :ivar is_shared_to_all: - :vartype is_shared_to_all: bool - :ivar metadata: Any object. - :vartype metadata: any - :ivar shared_user_list: - :vartype shared_user_list: list[str] - :ivar target: - :vartype target: str - :ivar credentials: Api key object for workspace connection credential. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey - """ - - _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionApiKey'}, - } - - def __init__( - self, - **kwargs - ): + AzureOpenAI: + Category:= AzureOpenAI + AuthType:= ApiKey (as type discriminator) + Credentials:= {ApiKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {ApiBase} + + CognitiveService: + Category:= CognitiveService + AuthType:= ApiKey (as type discriminator) + Credentials:= {SubscriptionKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= ServiceRegion={serviceRegion} + + CognitiveSearch: + Category:= CognitiveSearch + AuthType:= ApiKey (as type discriminator) + Credentials:= {Key} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {Endpoint} + + Use Metadata property bag for ApiType, 
ApiVersion, Kind and other metadata fields. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", + "AccountKey", "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys", "OAuth2", "AAD". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Possible values include: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", + "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", + "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", + "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", + "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", + "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", + "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", + "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", + "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", + "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", + "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", + "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", + "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", 
"Square", + "WebTable", "Xero", "Zoho", "GenericContainerRegistry". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: + :vartype created_by_workspace_arm_id: str + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar group: Group based on connection category. Possible values include: "Azure", "AzureAI", + "Database", "NoSQL", "File", "GenericProtocol", "ServicesAndApps". + :vartype group: str or ~azure.mgmt.machinelearningservices.models.ConnectionGroup + :ivar is_shared_to_all: + :vartype is_shared_to_all: bool + :ivar metadata: Any object. + :vartype metadata: any + :ivar shared_user_list: + :vartype shared_user_list: list[str] + :ivar target: + :vartype target: str + :ivar credentials: Api key object for workspace connection credential. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey + """ + + _validation = { + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionApiKey"}, + } + + def __init__(self, **kwargs): """ :keyword category: Category of the connection. 
Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -2197,8 +2095,8 @@ def __init__( :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey """ super(ApiKeyAuthWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ApiKey' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "ApiKey" # type: str + self.credentials = kwargs.get("credentials", None) class ArmResourceId(msrest.serialization.Model): @@ -2212,13 +2110,10 @@ class ArmResourceId(msrest.serialization.Model): """ _attribute_map = { - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_id: Arm ResourceId is in the format "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" @@ -2227,7 +2122,7 @@ def __init__( :paramtype resource_id: str """ super(ArmResourceId, self).__init__(**kwargs) - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) class ResourceBase(msrest.serialization.Model): @@ -2242,15 +2137,12 @@ class ResourceBase(msrest.serialization.Model): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -2260,9 +2152,9 @@ def __init__( :paramtype tags: dict[str, str] """ super(ResourceBase, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) class AssetBase(ResourceBase): @@ -2285,18 +2177,15 @@ class AssetBase(ResourceBase): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -2314,9 +2203,9 @@ def __init__( :paramtype is_archived: bool """ super(AssetBase, self).__init__(**kwargs) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.is_anonymous = kwargs.get('is_anonymous', False) - self.is_archived = kwargs.get('is_archived', False) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.is_anonymous = kwargs.get("is_anonymous", False) + self.is_archived = kwargs.get("is_archived", False) class AssetContainer(ResourceBase): @@ -2339,23 +2228,20 @@ class AssetContainer(ResourceBase): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -2367,7 +2253,7 @@ def __init__( :paramtype is_archived: bool """ super(AssetContainer, self).__init__(**kwargs) - self.is_archived = kwargs.get('is_archived', False) + self.is_archived = kwargs.get("is_archived", False) self.latest_version = None self.next_version = None @@ -2387,19 +2273,16 @@ class AssetJobInput(msrest.serialization.Model): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". 
@@ -2410,9 +2293,9 @@ def __init__( :paramtype uri: str """ super(AssetJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] class AssetJobOutput(msrest.serialization.Model): @@ -2434,18 +2317,15 @@ class AssetJobOutput(msrest.serialization.Model): """ _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -2462,12 +2342,12 @@ def __init__( :paramtype uri: str """ super(AssetJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) class AssetReferenceBase(msrest.serialization.Model): @@ -2484,24 +2364,23 @@ class AssetReferenceBase(msrest.serialization.Model): """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, } _subtype_map = { - 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', - 'OutputPath': 'OutputPathAssetReference'} + "reference_type": { + "DataPath": "DataPathAssetReference", + "Id": "IdAssetReference", + "OutputPath": "OutputPathAssetReference", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AssetReferenceBase, self).__init__(**kwargs) self.reference_type = None # type: Optional[str] @@ -2518,19 +2397,16 @@ class AssignedUser(msrest.serialization.Model): """ _validation = { - 'object_id': {'required': True}, - 'tenant_id': {'required': True}, + "object_id": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "object_id": {"key": 
"objectId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword object_id: Required. User’s AAD Object Id. :paramtype object_id: str @@ -2538,8 +2414,8 @@ def __init__( :paramtype tenant_id: str """ super(AssignedUser, self).__init__(**kwargs) - self.object_id = kwargs['object_id'] - self.tenant_id = kwargs['tenant_id'] + self.object_id = kwargs["object_id"] + self.tenant_id = kwargs["tenant_id"] class AutoDeleteSetting(msrest.serialization.Model): @@ -2553,14 +2429,11 @@ class AutoDeleteSetting(msrest.serialization.Model): """ _attribute_map = { - 'condition': {'key': 'condition', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "condition": {"key": "condition", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword condition: When to check if an asset is expired. Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan". 
@@ -2569,8 +2442,8 @@ def __init__( :paramtype value: str """ super(AutoDeleteSetting, self).__init__(**kwargs) - self.condition = kwargs.get('condition', None) - self.value = kwargs.get('value', None) + self.condition = kwargs.get("condition", None) + self.value = kwargs.get("value", None) class ForecastHorizon(msrest.serialization.Model): @@ -2587,23 +2460,17 @@ class ForecastHorizon(msrest.serialization.Model): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoForecastHorizon', 'Custom': 'CustomForecastHorizon'} - } + _subtype_map = {"mode": {"Auto": "AutoForecastHorizon", "Custom": "CustomForecastHorizon"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ForecastHorizon, self).__init__(**kwargs) self.mode = None # type: Optional[str] @@ -2619,21 +2486,17 @@ class AutoForecastHorizon(ForecastHorizon): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AutoForecastHorizon, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + self.mode = "Auto" # type: str class AutologgerSettings(msrest.serialization.Model): @@ -2648,17 +2511,14 @@ class AutologgerSettings(msrest.serialization.Model): """ _validation = { - 'mlflow_autologger': {'required': True}, + "mlflow_autologger": {"required": True}, } _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mlflow_autologger: Required. 
[Required] Indicates whether mlflow autologger is enabled. Possible values include: "Enabled", "Disabled". @@ -2666,7 +2526,7 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState """ super(AutologgerSettings, self).__init__(**kwargs) - self.mlflow_autologger = kwargs['mlflow_autologger'] + self.mlflow_autologger = kwargs["mlflow_autologger"] class JobBaseProperties(ResourceBase): @@ -2719,37 +2579,40 @@ class JobBaseProperties(ResourceBase): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": 
"bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, } _subtype_map = { - 'job_type': {'AutoML': 'AutoMLJob', 'Command': 'CommandJob', 'FineTuning': 'FineTuningJob', - 'Labeling': 'LabelingJobProperties', 'Pipeline': 'PipelineJob', 'Spark': 'SparkJob', - 'Sweep': 'SweepJob'} + "job_type": { + "AutoML": "AutoMLJob", + "Command": "CommandJob", + "FineTuning": "FineTuningJob", + "Labeling": "LabelingJobProperties", + "Pipeline": "PipelineJob", + "Spark": "SparkJob", + "Sweep": "SweepJob", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -2782,115 +2645,112 @@ def __init__( :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] """ super(JobBaseProperties, self).__init__(**kwargs) - self.component_id = kwargs.get('component_id', None) - self.compute_id = kwargs.get('compute_id', None) - self.display_name = kwargs.get('display_name', None) - self.experiment_name = kwargs.get('experiment_name', "Default") - self.identity = kwargs.get('identity', None) - self.is_archived = kwargs.get('is_archived', False) - self.job_type = 'JobBaseProperties' # type: str - self.notification_setting = kwargs.get('notification_setting', None) - self.secrets_configuration = kwargs.get('secrets_configuration', None) - self.services = kwargs.get('services', None) + self.component_id = kwargs.get("component_id", None) + self.compute_id = kwargs.get("compute_id", None) + self.display_name = kwargs.get("display_name", None) + self.experiment_name = kwargs.get("experiment_name", "Default") + self.identity = kwargs.get("identity", None) + self.is_archived = 
kwargs.get("is_archived", False) + self.job_type = "JobBaseProperties" # type: str + self.notification_setting = kwargs.get("notification_setting", None) + self.secrets_configuration = kwargs.get("secrets_configuration", None) + self.services = kwargs.get("services", None) self.status = None class AutoMLJob(JobBaseProperties): """AutoMLJob class. -Use this class for executing AutoML tasks like Classification/Regression etc. -See TaskType enum for all the tasks supported. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark", - "FineTuning". 
- :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar environment_id: The ARM resource ID of the Environment specification for the job. - This is optional value to provide, if not provided, AutoML will default this to Production - AutoML curated environment version when running the job. - :vartype environment_id: str - :ivar environment_variables: Environment variables included in the job. - :vartype environment_variables: dict[str, str] - :ivar outputs: Mapping of output data bindings used in the job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar queue_settings: Queue settings for the job. - :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :ivar resources: Compute Resource configuration for the job. - :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :ivar task_details: Required. [Required] This represents scenario which can be one of - Tables/NLP/Image. 
- :vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical + Use this class for executing AutoML tasks like Classification/Regression etc. + See TaskType enum for all the tasks supported. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. + Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark", + "FineTuning". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. 
+ :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", + "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", + "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + This is optional value to provide, if not provided, AutoML will default this to Production + AutoML curated environment version when running the job. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar queue_settings: Queue settings for the job. + :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + :ivar task_details: Required. [Required] This represents scenario which can be one of + Tables/NLP/Image. 
+ :vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'task_details': {'required': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "task_details": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'task_details': {'key': 'taskDetails', 'type': 'AutoMLVertical'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": 
"identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + "task_details": {"key": "taskDetails", "type": "AutoMLVertical"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -2938,65 +2798,67 @@ def __init__( :paramtype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical """ super(AutoMLJob, self).__init__(**kwargs) - self.job_type = 'AutoML' # type: str - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.outputs = kwargs.get('outputs', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - self.task_details = kwargs['task_details'] + self.job_type = "AutoML" # type: str + self.environment_id = kwargs.get("environment_id", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.outputs = kwargs.get("outputs", None) + self.queue_settings = kwargs.get("queue_settings", None) + self.resources = kwargs.get("resources", None) + self.task_details = kwargs["task_details"] class AutoMLVertical(msrest.serialization.Model): """AutoML vertical class. 
-Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. + Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, TextClassificationMultilabel, TextNer. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, TextClassificationMultilabel, TextNer. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. 
Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, } _subtype_map = { - 'task_type': {'Classification': 'Classification', 'Forecasting': 'Forecasting', - 'ImageClassification': 'ImageClassification', - 'ImageClassificationMultilabel': 'ImageClassificationMultilabel', - 'ImageInstanceSegmentation': 'ImageInstanceSegmentation', - 'ImageObjectDetection': 'ImageObjectDetection', 'Regression': 'Regression', - 
'TextClassification': 'TextClassification', - 'TextClassificationMultilabel': 'TextClassificationMultilabel', 'TextNER': 'TextNer'} - } - - def __init__( - self, - **kwargs - ): + "task_type": { + "Classification": "Classification", + "Forecasting": "Forecasting", + "ImageClassification": "ImageClassification", + "ImageClassificationMultilabel": "ImageClassificationMultilabel", + "ImageInstanceSegmentation": "ImageInstanceSegmentation", + "ImageObjectDetection": "ImageObjectDetection", + "Regression": "Regression", + "TextClassification": "TextClassification", + "TextClassificationMultilabel": "TextClassificationMultilabel", + "TextNER": "TextNer", + } + } + + def __init__(self, **kwargs): """ :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", "Info", "Warning", "Error", "Critical". @@ -3008,10 +2870,10 @@ def __init__( :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super(AutoMLVertical, self).__init__(**kwargs) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) self.task_type = None # type: Optional[str] - self.training_data = kwargs['training_data'] + self.training_data = kwargs["training_data"] class NCrossValidations(msrest.serialization.Model): @@ -3028,23 +2890,17 @@ class NCrossValidations(msrest.serialization.Model): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoNCrossValidations', 'Custom': 'CustomNCrossValidations'} - } + _subtype_map = {"mode": {"Auto": "AutoNCrossValidations", "Custom": "CustomNCrossValidations"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, 
**kwargs): + """ """ super(NCrossValidations, self).__init__(**kwargs) self.mode = None # type: Optional[str] @@ -3060,21 +2916,17 @@ class AutoNCrossValidations(NCrossValidations): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AutoNCrossValidations, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + self.mode = "Auto" # type: str class AutoPauseProperties(msrest.serialization.Model): @@ -3087,14 +2939,11 @@ class AutoPauseProperties(msrest.serialization.Model): """ _attribute_map = { - 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, + "delay_in_minutes": {"key": "delayInMinutes", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword delay_in_minutes: :paramtype delay_in_minutes: int @@ -3102,8 +2951,8 @@ def __init__( :paramtype enabled: bool """ super(AutoPauseProperties, self).__init__(**kwargs) - self.delay_in_minutes = kwargs.get('delay_in_minutes', None) - self.enabled = kwargs.get('enabled', None) + self.delay_in_minutes = kwargs.get("delay_in_minutes", None) + self.enabled = kwargs.get("enabled", None) class AutoScaleProperties(msrest.serialization.Model): @@ -3118,15 +2967,12 @@ class AutoScaleProperties(msrest.serialization.Model): """ _attribute_map = { - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, + "max_node_count": {"key": "maxNodeCount", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, 
**kwargs): """ :keyword min_node_count: :paramtype min_node_count: int @@ -3136,9 +2982,9 @@ def __init__( :paramtype max_node_count: int """ super(AutoScaleProperties, self).__init__(**kwargs) - self.min_node_count = kwargs.get('min_node_count', None) - self.enabled = kwargs.get('enabled', None) - self.max_node_count = kwargs.get('max_node_count', None) + self.min_node_count = kwargs.get("min_node_count", None) + self.enabled = kwargs.get("enabled", None) + self.max_node_count = kwargs.get("max_node_count", None) class Seasonality(msrest.serialization.Model): @@ -3155,23 +3001,17 @@ class Seasonality(msrest.serialization.Model): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoSeasonality', 'Custom': 'CustomSeasonality'} - } + _subtype_map = {"mode": {"Auto": "AutoSeasonality", "Custom": "CustomSeasonality"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(Seasonality, self).__init__(**kwargs) self.mode = None # type: Optional[str] @@ -3187,21 +3027,17 @@ class AutoSeasonality(Seasonality): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AutoSeasonality, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + self.mode = "Auto" # type: str class TargetLags(msrest.serialization.Model): @@ -3218,23 +3054,17 @@ class TargetLags(msrest.serialization.Model): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoTargetLags', 
'Custom': 'CustomTargetLags'} - } + _subtype_map = {"mode": {"Auto": "AutoTargetLags", "Custom": "CustomTargetLags"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(TargetLags, self).__init__(**kwargs) self.mode = None # type: Optional[str] @@ -3250,21 +3080,17 @@ class AutoTargetLags(TargetLags): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AutoTargetLags, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + self.mode = "Auto" # type: str class TargetRollingWindowSize(msrest.serialization.Model): @@ -3281,23 +3107,17 @@ class TargetRollingWindowSize(msrest.serialization.Model): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoTargetRollingWindowSize', 'Custom': 'CustomTargetRollingWindowSize'} - } + _subtype_map = {"mode": {"Auto": "AutoTargetRollingWindowSize", "Custom": "CustomTargetRollingWindowSize"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(TargetRollingWindowSize, self).__init__(**kwargs) self.mode = None # type: Optional[str] @@ -3313,21 +3133,17 @@ class AutoTargetRollingWindowSize(TargetRollingWindowSize): """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(AutoTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + self.mode = "Auto" # type: str class 
AzureDatastore(msrest.serialization.Model): @@ -3340,14 +3156,11 @@ class AzureDatastore(msrest.serialization.Model): """ _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_group: Azure Resource Group name. :paramtype resource_group: str @@ -3355,8 +3168,8 @@ def __init__( :paramtype subscription_id: str """ super(AzureDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group = kwargs.get("resource_group", None) + self.subscription_id = kwargs.get("subscription_id", None) class DatastoreProperties(ResourceBase): @@ -3389,31 +3202,33 @@ class DatastoreProperties(ResourceBase): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": 
"str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, } _subtype_map = { - 'datastore_type': {'AzureBlob': 'AzureBlobDatastore', 'AzureDataLakeGen1': 'AzureDataLakeGen1Datastore', - 'AzureDataLakeGen2': 'AzureDataLakeGen2Datastore', 'AzureFile': 'AzureFileDatastore', - 'Hdfs': 'HdfsDatastore', 'OneLake': 'OneLakeDatastore'} + "datastore_type": { + "AzureBlob": "AzureBlobDatastore", + "AzureDataLakeGen1": "AzureDataLakeGen1Datastore", + "AzureDataLakeGen2": "AzureDataLakeGen2Datastore", + "AzureFile": "AzureFileDatastore", + "Hdfs": "HdfsDatastore", + "OneLake": "OneLakeDatastore", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -3428,9 +3243,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.IntellectualProperty """ super(DatastoreProperties, self).__init__(**kwargs) - self.credentials = kwargs['credentials'] - self.datastore_type = 'DatastoreProperties' # type: str - self.intellectual_property = kwargs.get('intellectual_property', None) + self.credentials = kwargs["credentials"] + self.datastore_type = "DatastoreProperties" # type: str + self.intellectual_property = kwargs.get("intellectual_property", None) self.is_default = None @@ -3478,32 +3293,29 @@ class AzureBlobDatastore(DatastoreProperties, AzureDatastore): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': 
'{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "account_name": {"key": "accountName", "type": "str"}, + "container_name": {"key": "containerName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_group: Azure Resource Group name. 
:paramtype resource_group: str @@ -3535,19 +3347,19 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ super(AzureBlobDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureBlob' # type: str - self.account_name = kwargs.get('account_name', None) - self.container_name = kwargs.get('container_name', None) - self.endpoint = kwargs.get('endpoint', None) - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) + self.resource_group = kwargs.get("resource_group", None) + self.subscription_id = kwargs.get("subscription_id", None) + self.datastore_type = "AzureBlob" # type: str + self.account_name = kwargs.get("account_name", None) + self.container_name = kwargs.get("container_name", None) + self.endpoint = kwargs.get("endpoint", None) + self.protocol = kwargs.get("protocol", None) + self.service_data_access_auth_identity = kwargs.get("service_data_access_auth_identity", None) + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) + self.credentials = kwargs["credentials"] + self.intellectual_property = kwargs.get("intellectual_property", None) self.is_default = None @@ -3589,30 +3401,27 @@ class AzureDataLakeGen1Datastore(DatastoreProperties, AzureDatastore): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'store_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + 
"credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "store_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - 'store_name': {'key': 'storeName', 'type': 'str'}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + "store_name": {"key": "storeName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_group: Azure Resource Group name. 
:paramtype resource_group: str @@ -3638,16 +3447,16 @@ def __init__( :paramtype store_name: str """ super(AzureDataLakeGen1Datastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureDataLakeGen1' # type: str - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.store_name = kwargs['store_name'] - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) + self.resource_group = kwargs.get("resource_group", None) + self.subscription_id = kwargs.get("subscription_id", None) + self.datastore_type = "AzureDataLakeGen1" # type: str + self.service_data_access_auth_identity = kwargs.get("service_data_access_auth_identity", None) + self.store_name = kwargs["store_name"] + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) + self.credentials = kwargs["credentials"] + self.intellectual_property = kwargs.get("intellectual_property", None) self.is_default = None @@ -3695,34 +3504,31 @@ class AzureDataLakeGen2Datastore(DatastoreProperties, AzureDatastore): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'filesystem': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "filesystem": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } 
_attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'filesystem': {'key': 'filesystem', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "filesystem": {"key": "filesystem", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_group: Azure Resource Group name. 
:paramtype resource_group: str @@ -3754,19 +3560,19 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ super(AzureDataLakeGen2Datastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureDataLakeGen2' # type: str - self.account_name = kwargs['account_name'] - self.endpoint = kwargs.get('endpoint', None) - self.filesystem = kwargs['filesystem'] - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) + self.resource_group = kwargs.get("resource_group", None) + self.subscription_id = kwargs.get("subscription_id", None) + self.datastore_type = "AzureDataLakeGen2" # type: str + self.account_name = kwargs["account_name"] + self.endpoint = kwargs.get("endpoint", None) + self.filesystem = kwargs["filesystem"] + self.protocol = kwargs.get("protocol", None) + self.service_data_access_auth_identity = kwargs.get("service_data_access_auth_identity", None) + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) + self.credentials = kwargs["credentials"] + self.intellectual_property = kwargs.get("intellectual_property", None) self.is_default = None @@ -3786,28 +3592,23 @@ class Webhook(msrest.serialization.Model): """ _validation = { - 'webhook_type': {'required': True}, + "webhook_type": {"required": True}, } _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, + "event_type": {"key": "eventType", "type": "str"}, 
+ "webhook_type": {"key": "webhookType", "type": "str"}, } - _subtype_map = { - 'webhook_type': {'AzureDevOps': 'AzureDevOpsWebhook'} - } + _subtype_map = {"webhook_type": {"AzureDevOps": "AzureDevOpsWebhook"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword event_type: Send callback on a specified notification event. :paramtype event_type: str """ super(Webhook, self).__init__(**kwargs) - self.event_type = kwargs.get('event_type', None) + self.event_type = kwargs.get("event_type", None) self.webhook_type = None # type: Optional[str] @@ -3824,24 +3625,21 @@ class AzureDevOpsWebhook(Webhook): """ _validation = { - 'webhook_type': {'required': True}, + "webhook_type": {"required": True}, } _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, + "event_type": {"key": "eventType", "type": "str"}, + "webhook_type": {"key": "webhookType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword event_type: Send callback on a specified notification event. 
:paramtype event_type: str """ super(AzureDevOpsWebhook, self).__init__(**kwargs) - self.webhook_type = 'AzureDevOps' # type: str + self.webhook_type = "AzureDevOps" # type: str class AzureFileDatastore(DatastoreProperties, AzureDatastore): @@ -3889,34 +3687,31 @@ class AzureFileDatastore(DatastoreProperties, AzureDatastore): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'file_share_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "file_share_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'file_share_name': {'key': 'fileShareName', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "description": {"key": "description", "type": "str"}, + 
"properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "file_share_name": {"key": "fileShareName", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_group: Azure Resource Group name. :paramtype resource_group: str @@ -3949,19 +3744,19 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ super(AzureFileDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureFile' # type: str - self.account_name = kwargs['account_name'] - self.endpoint = kwargs.get('endpoint', None) - self.file_share_name = kwargs['file_share_name'] - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) + self.resource_group = kwargs.get("resource_group", None) + self.subscription_id = kwargs.get("subscription_id", None) + self.datastore_type = "AzureFile" # type: str + self.account_name = kwargs["account_name"] + self.endpoint = kwargs.get("endpoint", None) + 
self.file_share_name = kwargs["file_share_name"] + self.protocol = kwargs.get("protocol", None) + self.service_data_access_auth_identity = kwargs.get("service_data_access_auth_identity", None) + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) + self.credentials = kwargs["credentials"] + self.intellectual_property = kwargs.get("intellectual_property", None) self.is_default = None @@ -3979,25 +3774,24 @@ class InferencingServer(msrest.serialization.Model): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, + "server_type": {"key": "serverType", "type": "str"}, } _subtype_map = { - 'server_type': {'AzureMLBatch': 'AzureMLBatchInferencingServer', - 'AzureMLOnline': 'AzureMLOnlineInferencingServer', 'Custom': 'CustomInferencingServer', - 'Triton': 'TritonInferencingServer'} + "server_type": { + "AzureMLBatch": "AzureMLBatchInferencingServer", + "AzureMLOnline": "AzureMLOnlineInferencingServer", + "Custom": "CustomInferencingServer", + "Triton": "TritonInferencingServer", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(InferencingServer, self).__init__(**kwargs) self.server_type = None # type: Optional[str] @@ -4015,25 +3809,22 @@ class AzureMLBatchInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for AML batch 
inferencing server. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ super(AzureMLBatchInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLBatch' # type: str - self.code_configuration = kwargs.get('code_configuration', None) + self.server_type = "AzureMLBatch" # type: str + self.code_configuration = kwargs.get("code_configuration", None) class AzureMLOnlineInferencingServer(InferencingServer): @@ -4049,25 +3840,22 @@ class AzureMLOnlineInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for AML inferencing server. 
:paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ super(AzureMLOnlineInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLOnline' # type: str - self.code_configuration = kwargs.get('code_configuration', None) + self.server_type = "AzureMLOnline" # type: str + self.code_configuration = kwargs.get("code_configuration", None) class FineTuningVertical(msrest.serialization.Model): @@ -4095,28 +3883,23 @@ class FineTuningVertical(msrest.serialization.Model): """ _validation = { - 'model': {'required': True}, - 'model_provider': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "model": {"required": True}, + "model_provider": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'MLFlowModelJobInput'}, - 'model_provider': {'key': 'modelProvider', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'JobInput'}, - 'validation_data': {'key': 'validationData', 'type': 'JobInput'}, + "model": {"key": "model", "type": "MLFlowModelJobInput"}, + "model_provider": {"key": "modelProvider", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "JobInput"}, + "validation_data": {"key": "validationData", "type": "JobInput"}, } - _subtype_map = { - 'model_provider': {'AzureOpenAI': 'AzureOpenAiFineTuning', 'Custom': 'CustomModelFineTuning'} - } + _subtype_map = {"model_provider": {"AzureOpenAI": "AzureOpenAiFineTuning", "Custom": "CustomModelFineTuning"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. [Required] Input model for fine tuning. 
:paramtype model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput @@ -4131,11 +3914,11 @@ def __init__( :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.JobInput """ super(FineTuningVertical, self).__init__(**kwargs) - self.model = kwargs['model'] + self.model = kwargs["model"] self.model_provider = None # type: Optional[str] - self.task_type = kwargs['task_type'] - self.training_data = kwargs['training_data'] - self.validation_data = kwargs.get('validation_data', None) + self.task_type = kwargs["task_type"] + self.training_data = kwargs["training_data"] + self.validation_data = kwargs.get("validation_data", None) class AzureOpenAiFineTuning(FineTuningVertical): @@ -4163,25 +3946,22 @@ class AzureOpenAiFineTuning(FineTuningVertical): """ _validation = { - 'model': {'required': True}, - 'model_provider': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "model": {"required": True}, + "model_provider": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'MLFlowModelJobInput'}, - 'model_provider': {'key': 'modelProvider', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'JobInput'}, - 'validation_data': {'key': 'validationData', 'type': 'JobInput'}, - 'hyper_parameters': {'key': 'hyperParameters', 'type': 'AzureOpenAiHyperParameters'}, + "model": {"key": "model", "type": "MLFlowModelJobInput"}, + "model_provider": {"key": "modelProvider", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "JobInput"}, + "validation_data": {"key": "validationData", "type": "JobInput"}, + "hyper_parameters": {"key": "hyperParameters", "type": "AzureOpenAiHyperParameters"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. 
[Required] Input model for fine tuning. :paramtype model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput @@ -4199,8 +3979,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.AzureOpenAiHyperParameters """ super(AzureOpenAiFineTuning, self).__init__(**kwargs) - self.model_provider = 'AzureOpenAI' # type: str - self.hyper_parameters = kwargs.get('hyper_parameters', None) + self.model_provider = "AzureOpenAI" # type: str + self.hyper_parameters = kwargs.get("hyper_parameters", None) class AzureOpenAiHyperParameters(msrest.serialization.Model): @@ -4218,15 +3998,12 @@ class AzureOpenAiHyperParameters(msrest.serialization.Model): """ _attribute_map = { - 'batch_size': {'key': 'batchSize', 'type': 'int'}, - 'learning_rate_multiplier': {'key': 'learningRateMultiplier', 'type': 'float'}, - 'n_epochs': {'key': 'nEpochs', 'type': 'int'}, + "batch_size": {"key": "batchSize", "type": "int"}, + "learning_rate_multiplier": {"key": "learningRateMultiplier", "type": "float"}, + "n_epochs": {"key": "nEpochs", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword batch_size: Number of examples in each batch. A larger batch size means that model parameters are updated less frequently, but with lower variance. 
@@ -4239,9 +4016,9 @@ def __init__( :paramtype n_epochs: int """ super(AzureOpenAiHyperParameters, self).__init__(**kwargs) - self.batch_size = kwargs.get('batch_size', None) - self.learning_rate_multiplier = kwargs.get('learning_rate_multiplier', None) - self.n_epochs = kwargs.get('n_epochs', None) + self.batch_size = kwargs.get("batch_size", None) + self.learning_rate_multiplier = kwargs.get("learning_rate_multiplier", None) + self.n_epochs = kwargs.get("n_epochs", None) class EarlyTerminationPolicy(msrest.serialization.Model): @@ -4263,24 +4040,24 @@ class EarlyTerminationPolicy(msrest.serialization.Model): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, } _subtype_map = { - 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', - 'TruncationSelection': 'TruncationSelectionPolicy'} + "policy_type": { + "Bandit": "BanditPolicy", + "MedianStopping": "MedianStoppingPolicy", + "TruncationSelection": "TruncationSelectionPolicy", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. 
:paramtype delay_evaluation: int @@ -4288,8 +4065,8 @@ def __init__( :paramtype evaluation_interval: int """ super(EarlyTerminationPolicy, self).__init__(**kwargs) - self.delay_evaluation = kwargs.get('delay_evaluation', 0) - self.evaluation_interval = kwargs.get('evaluation_interval', 0) + self.delay_evaluation = kwargs.get("delay_evaluation", 0) + self.evaluation_interval = kwargs.get("evaluation_interval", 0) self.policy_type = None # type: Optional[str] @@ -4313,21 +4090,18 @@ class BanditPolicy(EarlyTerminationPolicy): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, - 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "slack_amount": {"key": "slackAmount", "type": "float"}, + "slack_factor": {"key": "slackFactor", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. 
:paramtype delay_evaluation: int @@ -4339,9 +4113,9 @@ def __init__( :paramtype slack_factor: float """ super(BanditPolicy, self).__init__(**kwargs) - self.policy_type = 'Bandit' # type: str - self.slack_amount = kwargs.get('slack_amount', 0) - self.slack_factor = kwargs.get('slack_factor', 0) + self.policy_type = "Bandit" # type: str + self.slack_amount = kwargs.get("slack_amount", 0) + self.slack_factor = kwargs.get("slack_factor", 0) class BaseEnvironmentSource(msrest.serialization.Model): @@ -4359,23 +4133,17 @@ class BaseEnvironmentSource(msrest.serialization.Model): """ _validation = { - 'base_environment_source_type': {'required': True}, + "base_environment_source_type": {"required": True}, } _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, } - _subtype_map = { - 'base_environment_source_type': {'EnvironmentAsset': 'BaseEnvironmentId'} - } + _subtype_map = {"base_environment_source_type": {"EnvironmentAsset": "BaseEnvironmentId"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(BaseEnvironmentSource, self).__init__(**kwargs) self.base_environment_source_type = None # type: Optional[str] @@ -4394,26 +4162,23 @@ class BaseEnvironmentId(BaseEnvironmentSource): """ _validation = { - 'base_environment_source_type': {'required': True}, - 'resource_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "base_environment_source_type": {"required": True}, + "resource_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - 
self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_id: Required. [Required] Resource id accepting ArmId or AzureMlId. :paramtype resource_id: str """ super(BaseEnvironmentId, self).__init__(**kwargs) - self.base_environment_source_type = 'EnvironmentAsset' # type: str - self.resource_id = kwargs['resource_id'] + self.base_environment_source_type = "EnvironmentAsset" # type: str + self.resource_id = kwargs["resource_id"] class Resource(msrest.serialization.Model): @@ -4435,25 +4200,21 @@ class Resource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(Resource, self).__init__(**kwargs) self.id = None self.name = None @@ -4486,26 +4247,23 @@ class TrackedResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': 
{'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -4513,8 +4271,8 @@ def __init__( :paramtype location: str """ super(TrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.location = kwargs['location'] + self.tags = kwargs.get("tags", None) + self.location = kwargs["location"] class BatchDeployment(TrackedResource): @@ -4551,31 +4309,28 @@ class BatchDeployment(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": 
"SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -4592,10 +4347,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(BatchDeployment, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class BatchDeploymentConfiguration(msrest.serialization.Model): @@ -4613,23 +4368,19 @@ class BatchDeploymentConfiguration(msrest.serialization.Model): """ _validation = { - 'deployment_configuration_type': {'required': True}, + "deployment_configuration_type": {"required": True}, } _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, } _subtype_map = { - 'deployment_configuration_type': {'PipelineComponent': 'BatchPipelineComponentDeploymentConfiguration'} + "deployment_configuration_type": {"PipelineComponent": "BatchPipelineComponentDeploymentConfiguration"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(BatchDeploymentConfiguration, self).__init__(**kwargs) self.deployment_configuration_type = None # type: Optional[str] @@ -4651,17 +4402,14 @@ class EndpointDeploymentPropertiesBase(msrest.serialization.Model): 
""" _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -4676,11 +4424,11 @@ def __init__( :paramtype properties: dict[str, str] """ super(EndpointDeploymentPropertiesBase, self).__init__(**kwargs) - self.code_configuration = kwargs.get('code_configuration', None) - self.description = kwargs.get('description', None) - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.properties = kwargs.get('properties', None) + self.code_configuration = kwargs.get("code_configuration", None) + self.description = kwargs.get("description", None) + self.environment_id = kwargs.get("environment_id", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.properties = kwargs.get("properties", None) class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): @@ -4740,33 +4488,30 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': 
{'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'deployment_configuration': {'key': 'deploymentConfiguration', 'type': 'BatchDeploymentConfiguration'}, - 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, - 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, - 'max_concurrency_per_instance': {'key': 'maxConcurrencyPerInstance', 'type': 'int'}, - 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, - 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, - 'output_action': {'key': 'outputAction', 'type': 'str'}, - 'output_file_name': {'key': 'outputFileName', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'resources': {'key': 'resources', 'type': 'DeploymentResourceConfiguration'}, - 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "compute": {"key": "compute", "type": "str"}, + "deployment_configuration": {"key": "deploymentConfiguration", "type": "BatchDeploymentConfiguration"}, + "error_threshold": {"key": "errorThreshold", "type": "int"}, + "logging_level": {"key": "loggingLevel", "type": "str"}, + "max_concurrency_per_instance": {"key": "maxConcurrencyPerInstance", "type": "int"}, + "mini_batch_size": {"key": "miniBatchSize", "type": "long"}, + "model": {"key": "model", "type": "AssetReferenceBase"}, + "output_action": {"key": 
"outputAction", "type": "str"}, + "output_file_name": {"key": "outputFileName", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resources": {"key": "resources", "type": "DeploymentResourceConfiguration"}, + "retry_settings": {"key": "retrySettings", "type": "BatchRetrySettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -4816,18 +4561,18 @@ def __init__( :paramtype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings """ super(BatchDeploymentProperties, self).__init__(**kwargs) - self.compute = kwargs.get('compute', None) - self.deployment_configuration = kwargs.get('deployment_configuration', None) - self.error_threshold = kwargs.get('error_threshold', -1) - self.logging_level = kwargs.get('logging_level', None) - self.max_concurrency_per_instance = kwargs.get('max_concurrency_per_instance', 1) - self.mini_batch_size = kwargs.get('mini_batch_size', 10) - self.model = kwargs.get('model', None) - self.output_action = kwargs.get('output_action', None) - self.output_file_name = kwargs.get('output_file_name', "predictions.csv") + self.compute = kwargs.get("compute", None) + self.deployment_configuration = kwargs.get("deployment_configuration", None) + self.error_threshold = kwargs.get("error_threshold", -1) + self.logging_level = kwargs.get("logging_level", None) + self.max_concurrency_per_instance = kwargs.get("max_concurrency_per_instance", 1) + self.mini_batch_size = kwargs.get("mini_batch_size", 10) + self.model = kwargs.get("model", None) + self.output_action = kwargs.get("output_action", None) + self.output_file_name = kwargs.get("output_file_name", "predictions.csv") self.provisioning_state = None - self.resources = kwargs.get('resources', None) - self.retry_settings = 
kwargs.get('retry_settings', None) + self.resources = kwargs.get("resources", None) + self.retry_settings = kwargs.get("retry_settings", None) class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -4841,14 +4586,11 @@ class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Mode """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchDeployment]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchDeployment]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of BatchDeployment objects. If null, there are no additional pages. @@ -4857,8 +4599,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] """ super(BatchDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class BatchEndpoint(TrackedResource): @@ -4895,31 +4637,28 @@ class BatchEndpoint(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 
'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -4936,10 +4675,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(BatchEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class BatchEndpointDefaults(msrest.serialization.Model): @@ -4951,20 +4690,17 @@ class BatchEndpointDefaults(msrest.serialization.Model): """ _attribute_map = { - 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, + "deployment_name": {"key": "deploymentName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword deployment_name: Name of the deployment that will be default for the endpoint. This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. 
:paramtype deployment_name: str """ super(BatchEndpointDefaults, self).__init__(**kwargs) - self.deployment_name = kwargs.get('deployment_name', None) + self.deployment_name = kwargs.get("deployment_name", None) class EndpointPropertiesBase(msrest.serialization.Model): @@ -4993,24 +4729,21 @@ class EndpointPropertiesBase(msrest.serialization.Model): """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure Machine Learning token-based authentication. 
'Key' doesn't expire but 'AMLToken' @@ -5026,10 +4759,10 @@ def __init__( :paramtype properties: dict[str, str] """ super(EndpointPropertiesBase, self).__init__(**kwargs) - self.auth_mode = kwargs['auth_mode'] - self.description = kwargs.get('description', None) - self.keys = kwargs.get('keys', None) - self.properties = kwargs.get('properties', None) + self.auth_mode = kwargs["auth_mode"] + self.description = kwargs.get("description", None) + self.keys = kwargs.get("keys", None) + self.properties = kwargs.get("properties", None) self.scoring_uri = None self.swagger_uri = None @@ -5066,27 +4799,24 @@ class BatchEndpointProperties(EndpointPropertiesBase): """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'defaults': {'key': 'defaults', 'type': 'BatchEndpointDefaults'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "defaults": {"key": "defaults", "type": "BatchEndpointDefaults"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def 
__init__(self, **kwargs): """ :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' @@ -5104,7 +4834,7 @@ def __init__( :paramtype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults """ super(BatchEndpointProperties, self).__init__(**kwargs) - self.defaults = kwargs.get('defaults', None) + self.defaults = kwargs.get("defaults", None) self.provisioning_state = None @@ -5119,14 +4849,11 @@ class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model) """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchEndpoint]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of BatchEndpoint objects. If null, there are no additional pages. 
@@ -5135,8 +4862,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] """ super(BatchEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration): @@ -5159,21 +4886,18 @@ class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration """ _validation = { - 'deployment_configuration_type': {'required': True}, + "deployment_configuration_type": {"required": True}, } _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'IdAssetReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'settings': {'key': 'settings', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, + "component_id": {"key": "componentId", "type": "IdAssetReference"}, + "description": {"key": "description", "type": "str"}, + "settings": {"key": "settings", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword component_id: The ARM id of the component to be run. 
:paramtype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference @@ -5185,11 +4909,11 @@ def __init__( :paramtype tags: dict[str, str] """ super(BatchPipelineComponentDeploymentConfiguration, self).__init__(**kwargs) - self.deployment_configuration_type = 'PipelineComponent' # type: str - self.component_id = kwargs.get('component_id', None) - self.description = kwargs.get('description', None) - self.settings = kwargs.get('settings', None) - self.tags = kwargs.get('tags', None) + self.deployment_configuration_type = "PipelineComponent" # type: str + self.component_id = kwargs.get("component_id", None) + self.description = kwargs.get("description", None) + self.settings = kwargs.get("settings", None) + self.tags = kwargs.get("tags", None) class BatchRetrySettings(msrest.serialization.Model): @@ -5202,14 +4926,11 @@ class BatchRetrySettings(msrest.serialization.Model): """ _attribute_map = { - 'max_retries': {'key': 'maxRetries', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "max_retries": {"key": "maxRetries", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_retries: Maximum retry count for a mini-batch. :paramtype max_retries: int @@ -5217,45 +4938,44 @@ def __init__( :paramtype timeout: ~datetime.timedelta """ super(BatchRetrySettings, self).__init__(**kwargs) - self.max_retries = kwargs.get('max_retries', 3) - self.timeout = kwargs.get('timeout', "PT30S") + self.max_retries = kwargs.get("max_retries", 3) + self.timeout = kwargs.get("timeout", "PT30S") class SamplingAlgorithm(msrest.serialization.Model): """The Sampling Algorithm used to generate hyperparameter values, along with properties to -configure the algorithm. + configure the algorithm. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating + hyperparameter values, along with configuration properties.Constant filled by server. Possible + values include: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } _subtype_map = { - 'sampling_algorithm_type': {'Bayesian': 'BayesianSamplingAlgorithm', 'Grid': 'GridSamplingAlgorithm', - 'Random': 'RandomSamplingAlgorithm'} + "sampling_algorithm_type": { + "Bayesian": "BayesianSamplingAlgorithm", + "Grid": "GridSamplingAlgorithm", + "Random": "RandomSamplingAlgorithm", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(SamplingAlgorithm, self).__init__(**kwargs) self.sampling_algorithm_type = None # type: Optional[str] @@ -5273,21 +4993,17 @@ class BayesianSamplingAlgorithm(SamplingAlgorithm): """ 
_validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(BayesianSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Bayesian' # type: str + self.sampling_algorithm_type = "Bayesian" # type: str class BindOptions(msrest.serialization.Model): @@ -5302,15 +5018,12 @@ class BindOptions(msrest.serialization.Model): """ _attribute_map = { - 'propagation': {'key': 'propagation', 'type': 'str'}, - 'create_host_path': {'key': 'createHostPath', 'type': 'bool'}, - 'selinux': {'key': 'selinux', 'type': 'str'}, + "propagation": {"key": "propagation", "type": "str"}, + "create_host_path": {"key": "createHostPath", "type": "bool"}, + "selinux": {"key": "selinux", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword propagation: Type of Bind Option. 
:paramtype propagation: str @@ -5320,9 +5033,9 @@ def __init__( :paramtype selinux: str """ super(BindOptions, self).__init__(**kwargs) - self.propagation = kwargs.get('propagation', None) - self.create_host_path = kwargs.get('create_host_path', None) - self.selinux = kwargs.get('selinux', None) + self.propagation = kwargs.get("propagation", None) + self.create_host_path = kwargs.get("create_host_path", None) + self.selinux = kwargs.get("selinux", None) class BlobReferenceForConsumptionDto(msrest.serialization.Model): @@ -5338,15 +5051,12 @@ class BlobReferenceForConsumptionDto(msrest.serialization.Model): """ _attribute_map = { - 'blob_uri': {'key': 'blobUri', 'type': 'str'}, - 'credential': {'key': 'credential', 'type': 'PendingUploadCredentialDto'}, - 'storage_account_arm_id': {'key': 'storageAccountArmId', 'type': 'str'}, + "blob_uri": {"key": "blobUri", "type": "str"}, + "credential": {"key": "credential", "type": "PendingUploadCredentialDto"}, + "storage_account_arm_id": {"key": "storageAccountArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword blob_uri: Blob URI path for client to upload data. Example: https://blob.windows.core.net/Container/Path. 
@@ -5357,9 +5067,9 @@ def __init__( :paramtype storage_account_arm_id: str """ super(BlobReferenceForConsumptionDto, self).__init__(**kwargs) - self.blob_uri = kwargs.get('blob_uri', None) - self.credential = kwargs.get('credential', None) - self.storage_account_arm_id = kwargs.get('storage_account_arm_id', None) + self.blob_uri = kwargs.get("blob_uri", None) + self.credential = kwargs.get("credential", None) + self.storage_account_arm_id = kwargs.get("storage_account_arm_id", None) class BuildContext(msrest.serialization.Model): @@ -5386,18 +5096,15 @@ class BuildContext(msrest.serialization.Model): """ _validation = { - 'context_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "context_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'context_uri': {'key': 'contextUri', 'type': 'str'}, - 'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'}, + "context_uri": {"key": "contextUri", "type": "str"}, + "dockerfile_path": {"key": "dockerfilePath", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword context_uri: Required. [Required] URI of the Docker build context used to build the image. Supports blob URIs on environment creation and may return blob or Git URIs. 
@@ -5417,8 +5124,8 @@ def __init__( :paramtype dockerfile_path: str """ super(BuildContext, self).__init__(**kwargs) - self.context_uri = kwargs['context_uri'] - self.dockerfile_path = kwargs.get('dockerfile_path', "Dockerfile") + self.context_uri = kwargs["context_uri"] + self.dockerfile_path = kwargs.get("dockerfile_path", "Dockerfile") class CallRateLimit(msrest.serialization.Model): @@ -5433,15 +5140,12 @@ class CallRateLimit(msrest.serialization.Model): """ _attribute_map = { - 'count': {'key': 'count', 'type': 'float'}, - 'renewal_period': {'key': 'renewalPeriod', 'type': 'float'}, - 'rules': {'key': 'rules', 'type': '[ThrottlingRule]'}, + "count": {"key": "count", "type": "float"}, + "renewal_period": {"key": "renewalPeriod", "type": "float"}, + "rules": {"key": "rules", "type": "[ThrottlingRule]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword count: The count value of Call Rate Limit. :paramtype count: float @@ -5451,9 +5155,9 @@ def __init__( :paramtype rules: list[~azure.mgmt.machinelearningservices.models.ThrottlingRule] """ super(CallRateLimit, self).__init__(**kwargs) - self.count = kwargs.get('count', None) - self.renewal_period = kwargs.get('renewal_period', None) - self.rules = kwargs.get('rules', None) + self.count = kwargs.get("count", None) + self.renewal_period = kwargs.get("renewal_period", None) + self.rules = kwargs.get("rules", None) class CapacityConfig(msrest.serialization.Model): @@ -5472,17 +5176,14 @@ class CapacityConfig(msrest.serialization.Model): """ _attribute_map = { - 'minimum': {'key': 'minimum', 'type': 'int'}, - 'maximum': {'key': 'maximum', 'type': 'int'}, - 'step': {'key': 'step', 'type': 'int'}, - 'default': {'key': 'default', 'type': 'int'}, - 'allowed_values': {'key': 'allowedValues', 'type': '[int]'}, + "minimum": {"key": "minimum", "type": "int"}, + "maximum": {"key": "maximum", "type": "int"}, + "step": {"key": "step", "type": "int"}, + "default": {"key": "default", "type": 
"int"}, + "allowed_values": {"key": "allowedValues", "type": "[int]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword minimum: The minimum capacity. :paramtype minimum: int @@ -5496,11 +5197,11 @@ def __init__( :paramtype allowed_values: list[int] """ super(CapacityConfig, self).__init__(**kwargs) - self.minimum = kwargs.get('minimum', None) - self.maximum = kwargs.get('maximum', None) - self.step = kwargs.get('step', None) - self.default = kwargs.get('default', None) - self.allowed_values = kwargs.get('allowed_values', None) + self.minimum = kwargs.get("minimum", None) + self.maximum = kwargs.get("maximum", None) + self.step = kwargs.get("step", None) + self.default = kwargs.get("default", None) + self.allowed_values = kwargs.get("allowed_values", None) class CapacityReservationGroup(TrackedResource): @@ -5538,31 +5239,28 @@ class CapacityReservationGroup(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'CapacityReservationGroupProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + 
"system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "CapacityReservationGroupProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -5580,10 +5278,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(CapacityReservationGroup, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class CapacityReservationGroupProperties(msrest.serialization.Model): @@ -5598,18 +5296,15 @@ class CapacityReservationGroupProperties(msrest.serialization.Model): """ _validation = { - 'reserved_capacity': {'required': True}, + "reserved_capacity": {"required": True}, } _attribute_map = { - 'offer': {'key': 'offer', 'type': 'ServerlessOffer'}, - 'reserved_capacity': {'key': 'reservedCapacity', 'type': 'int'}, + "offer": {"key": "offer", "type": "ServerlessOffer"}, + "reserved_capacity": {"key": "reservedCapacity", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword offer: Offer used by this capacity reservation group. 
:paramtype offer: ~azure.mgmt.machinelearningservices.models.ServerlessOffer @@ -5617,8 +5312,8 @@ def __init__( :paramtype reserved_capacity: int """ super(CapacityReservationGroupProperties, self).__init__(**kwargs) - self.offer = kwargs.get('offer', None) - self.reserved_capacity = kwargs['reserved_capacity'] + self.offer = kwargs.get("offer", None) + self.reserved_capacity = kwargs["reserved_capacity"] class CapacityReservationGroupTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -5632,14 +5327,11 @@ class CapacityReservationGroupTrackedResourceArmPaginatedResult(msrest.serializa """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CapacityReservationGroup]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CapacityReservationGroup]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of CapacityReservationGroup objects. If null, there are no additional pages. 
@@ -5648,8 +5340,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.CapacityReservationGroup] """ super(CapacityReservationGroupTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class DataDriftMetricThresholdBase(msrest.serialization.Model): @@ -5669,23 +5361,22 @@ class DataDriftMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataDriftMetricThreshold', - 'Numerical': 'NumericalDataDriftMetricThreshold'} + "data_type": { + "Categorical": "CategoricalDataDriftMetricThreshold", + "Numerical": "NumericalDataDriftMetricThreshold", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5693,7 +5384,7 @@ def __init__( """ super(DataDriftMetricThresholdBase, self).__init__(**kwargs) self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) + self.threshold = kwargs.get("threshold", None) class CategoricalDataDriftMetricThreshold(DataDriftMetricThresholdBase): @@ -5713,20 +5404,17 @@ class CategoricalDataDriftMetricThreshold(DataDriftMetricThresholdBase): """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5736,8 +5424,8 @@ def __init__( :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric """ super(CategoricalDataDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Categorical" # type: str + self.metric = kwargs["metric"] class DataQualityMetricThresholdBase(msrest.serialization.Model): @@ -5757,23 +5445,22 @@ class DataQualityMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataQualityMetricThreshold', - 'Numerical': 'NumericalDataQualityMetricThreshold'} + "data_type": { + "Categorical": "CategoricalDataQualityMetricThreshold", + "Numerical": "NumericalDataQualityMetricThreshold", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5781,7 +5468,7 @@ def __init__( """ super(DataQualityMetricThresholdBase, self).__init__(**kwargs) self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) + self.threshold = kwargs.get("threshold", None) class CategoricalDataQualityMetricThreshold(DataQualityMetricThresholdBase): @@ -5801,20 +5488,17 @@ class CategoricalDataQualityMetricThreshold(DataQualityMetricThresholdBase): """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5825,8 +5509,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric """ super(CategoricalDataQualityMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Categorical" # type: str + self.metric = kwargs["metric"] class PredictionDriftMetricThresholdBase(msrest.serialization.Model): @@ -5846,23 +5530,22 @@ class PredictionDriftMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalPredictionDriftMetricThreshold', - 'Numerical': 'NumericalPredictionDriftMetricThreshold'} + "data_type": { + "Categorical": "CategoricalPredictionDriftMetricThreshold", + "Numerical": "NumericalPredictionDriftMetricThreshold", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5870,7 +5553,7 @@ def __init__( """ super(PredictionDriftMetricThresholdBase, self).__init__(**kwargs) self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) + self.threshold = kwargs.get("threshold", None) class CategoricalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): @@ -5892,20 +5575,17 @@ class CategoricalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBa """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -5917,8 +5597,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric """ super(CategoricalPredictionDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Categorical" # type: str + self.metric = kwargs["metric"] class CertificateDatastoreCredentials(DatastoreCredentials): @@ -5945,27 +5625,24 @@ class CertificateDatastoreCredentials(DatastoreCredentials): """ _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, - 'thumbprint': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, + "thumbprint": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "CertificateDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "thumbprint": {"key": "thumbprint", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword authority_url: Authority URL used for authentication. 
:paramtype authority_url: str @@ -5983,13 +5660,13 @@ def __init__( :paramtype thumbprint: str """ super(CertificateDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Certificate' # type: str - self.authority_url = kwargs.get('authority_url', None) - self.client_id = kwargs['client_id'] - self.resource_url = kwargs.get('resource_url', None) - self.secrets = kwargs['secrets'] - self.tenant_id = kwargs['tenant_id'] - self.thumbprint = kwargs['thumbprint'] + self.credentials_type = "Certificate" # type: str + self.authority_url = kwargs.get("authority_url", None) + self.client_id = kwargs["client_id"] + self.resource_url = kwargs.get("resource_url", None) + self.secrets = kwargs["secrets"] + self.tenant_id = kwargs["tenant_id"] + self.thumbprint = kwargs["thumbprint"] class CertificateDatastoreSecrets(DatastoreSecrets): @@ -6006,25 +5683,22 @@ class CertificateDatastoreSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'certificate': {'key': 'certificate', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "certificate": {"key": "certificate", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword certificate: Service principal certificate. 
:paramtype certificate: str """ super(CertificateDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Certificate' # type: str - self.certificate = kwargs.get('certificate', None) + self.secrets_type = "Certificate" # type: str + self.certificate = kwargs.get("certificate", None) class TableVertical(msrest.serialization.Model): @@ -6068,24 +5742,21 @@ class TableVertical(msrest.serialization.Model): """ _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": 
"MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cv_split_column_names: Columns to use for CVSplit data. :paramtype cv_split_column_names: list[str] @@ -6127,18 +5798,18 @@ def __init__( :paramtype weight_column_name: str """ super(TableVertical, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) + self.cv_split_column_names = kwargs.get("cv_split_column_names", None) + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.n_cross_validations = kwargs.get("n_cross_validations", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.test_data = kwargs.get("test_data", None) + self.test_data_size = kwargs.get("test_data_size", None) + self.validation_data = kwargs.get("validation_data", None) + 
self.validation_data_size = kwargs.get("validation_data_size", None) + self.weight_column_name = kwargs.get("weight_column_name", None) class Classification(AutoMLVertical, TableVertical): @@ -6206,36 +5877,33 @@ class Classification(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ClassificationTrainingSettings'}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": 
"TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "positive_label": {"key": "positiveLabel", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ClassificationTrainingSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cv_split_column_names: Columns to use for CVSplit data. 
:paramtype cv_split_column_names: list[str] @@ -6294,25 +5962,25 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings """ super(Classification, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Classification' # type: str - self.positive_label = kwargs.get('positive_label', None) - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.cv_split_column_names = kwargs.get("cv_split_column_names", None) + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.n_cross_validations = kwargs.get("n_cross_validations", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.test_data = kwargs.get("test_data", None) + self.test_data_size = kwargs.get("test_data_size", None) + self.validation_data = kwargs.get("validation_data", None) + 
self.validation_data_size = kwargs.get("validation_data_size", None) + self.weight_column_name = kwargs.get("weight_column_name", None) + self.task_type = "Classification" # type: str + self.positive_label = kwargs.get("positive_label", None) + self.primary_metric = kwargs.get("primary_metric", None) + self.training_settings = kwargs.get("training_settings", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ModelPerformanceMetricThresholdBase(msrest.serialization.Model): @@ -6332,23 +6000,22 @@ class ModelPerformanceMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'model_type': {'required': True}, + "model_type": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'model_type': {'Classification': 'ClassificationModelPerformanceMetricThreshold', - 'Regression': 'RegressionModelPerformanceMetricThreshold'} + "model_type": { + "Classification": "ClassificationModelPerformanceMetricThreshold", + "Regression": "RegressionModelPerformanceMetricThreshold", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -6356,7 +6023,7 @@ def __init__( """ super(ModelPerformanceMetricThresholdBase, self).__init__(**kwargs) self.model_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) + self.threshold = kwargs.get("threshold", None) class ClassificationModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): @@ -6377,20 +6044,17 @@ class ClassificationModelPerformanceMetricThreshold(ModelPerformanceMetricThresh """ _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, + "model_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -6401,8 +6065,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric """ super(ClassificationModelPerformanceMetricThreshold, self).__init__(**kwargs) - self.model_type = 'Classification' # type: str - self.metric = kwargs['metric'] + self.model_type = "Classification" # type: str + self.metric = kwargs["metric"] class TrainingSettings(msrest.serialization.Model): @@ -6436,20 +6100,17 @@ class TrainingSettings(msrest.serialization.Model): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword enable_dnn_training: Enable recommendation of DNN models. 
:paramtype enable_dnn_training: bool @@ -6478,14 +6139,14 @@ def __init__( :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode """ super(TrainingSettings, self).__init__(**kwargs) - self.enable_dnn_training = kwargs.get('enable_dnn_training', False) - self.enable_model_explainability = kwargs.get('enable_model_explainability', True) - self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', False) - self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', True) - self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', True) - self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', "PT5M") - self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None) - self.training_mode = kwargs.get('training_mode', None) + self.enable_dnn_training = kwargs.get("enable_dnn_training", False) + self.enable_model_explainability = kwargs.get("enable_model_explainability", True) + self.enable_onnx_compatible_models = kwargs.get("enable_onnx_compatible_models", False) + self.enable_stack_ensemble = kwargs.get("enable_stack_ensemble", True) + self.enable_vote_ensemble = kwargs.get("enable_vote_ensemble", True) + self.ensemble_model_download_timeout = kwargs.get("ensemble_model_download_timeout", "PT5M") + self.stack_ensemble_settings = kwargs.get("stack_ensemble_settings", None) + self.training_mode = kwargs.get("training_mode", None) class ClassificationTrainingSettings(TrainingSettings): @@ -6525,22 +6186,19 @@ class ClassificationTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 
'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword enable_dnn_training: Enable recommendation of DNN models. 
:paramtype enable_dnn_training: bool @@ -6575,8 +6233,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationModels] """ super(ClassificationTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) + self.allowed_training_algorithms = kwargs.get("allowed_training_algorithms", None) + self.blocked_training_algorithms = kwargs.get("blocked_training_algorithms", None) class ClusterUpdateParameters(msrest.serialization.Model): @@ -6587,19 +6245,16 @@ class ClusterUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'}, + "properties": {"key": "properties.properties", "type": "ScaleSettingsInformation"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of ClusterUpdate. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation """ super(ClusterUpdateParameters, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class ExportSummary(msrest.serialization.Model): @@ -6626,31 +6281,27 @@ class ExportSummary(msrest.serialization.Model): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "long"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, } _subtype_map = { - 'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'} + "format": {"CSV": "CsvExportSummary", "Coco": "CocoExportSummary", "Dataset": "DatasetExportSummary"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ExportSummary, self).__init__(**kwargs) self.end_date_time = None self.exported_row_count = None @@ -6684,33 +6335,29 @@ class CocoExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': 
{'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "long"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(CocoExportSummary, self).__init__(**kwargs) - self.format = 'Coco' # type: str + self.format = "Coco" # type: str self.container_name = None self.snapshot_path = None @@ -6727,18 +6374,15 @@ class CodeConfiguration(msrest.serialization.Model): """ _validation = { - 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "scoring_script": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'scoring_script': {'key': 
'scoringScript', 'type': 'str'}, + "code_id": {"key": "codeId", "type": "str"}, + "scoring_script": {"key": "scoringScript", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str @@ -6746,8 +6390,8 @@ def __init__( :paramtype scoring_script: str """ super(CodeConfiguration, self).__init__(**kwargs) - self.code_id = kwargs.get('code_id', None) - self.scoring_script = kwargs['scoring_script'] + self.code_id = kwargs.get("code_id", None) + self.scoring_script = kwargs["scoring_script"] class ProxyResource(Resource): @@ -6769,25 +6413,21 @@ class ProxyResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ProxyResource, self).__init__(**kwargs) @@ -6814,31 +6454,28 @@ class CodeContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 
'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CodeContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties """ super(CodeContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class CodeContainerProperties(AssetContainer): @@ -6865,25 +6502,22 @@ class CodeContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", 
"type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -6909,14 +6543,11 @@ class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of CodeContainer objects. If null, there are no additional pages. @@ -6925,8 +6556,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] """ super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class CodeVersion(ProxyResource): @@ -6952,31 +6583,28 @@ class CodeVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": 
"properties", "type": "CodeVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties """ super(CodeVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class CodeVersionProperties(AssetBase): @@ -7007,24 +6635,21 @@ class CodeVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'code_uri': {'key': 'codeUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "code_uri": {"key": "codeUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -7044,7 +6669,7 @@ def __init__( :paramtype code_uri: str """ super(CodeVersionProperties, self).__init__(**kwargs) - self.code_uri = kwargs.get('code_uri', None) + self.code_uri = kwargs.get("code_uri", None) self.provisioning_state = None @@ -7059,14 +6684,11 @@ class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of CodeVersion objects. If null, there are no additional pages. @@ -7075,8 +6697,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] """ super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class CognitiveServiceEndpointDeploymentResourceProperties(msrest.serialization.Model): @@ -7097,20 +6719,17 @@ class CognitiveServiceEndpointDeploymentResourceProperties(msrest.serialization. 
""" _validation = { - 'model': {'required': True}, + "model": {"required": True}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'EndpointDeploymentModel'}, - 'rai_policy_name': {'key': 'raiPolicyName', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'CognitiveServicesSku'}, - 'version_upgrade_option': {'key': 'versionUpgradeOption', 'type': 'str'}, + "model": {"key": "model", "type": "EndpointDeploymentModel"}, + "rai_policy_name": {"key": "raiPolicyName", "type": "str"}, + "sku": {"key": "sku", "type": "CognitiveServicesSku"}, + "version_upgrade_option": {"key": "versionUpgradeOption", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. Model used for the endpoint deployment. :paramtype model: ~azure.mgmt.machinelearningservices.models.EndpointDeploymentModel @@ -7124,10 +6743,10 @@ def __init__( ~azure.mgmt.machinelearningservices.models.DeploymentModelVersionUpgradeOption """ super(CognitiveServiceEndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.model = kwargs['model'] - self.rai_policy_name = kwargs.get('rai_policy_name', None) - self.sku = kwargs.get('sku', None) - self.version_upgrade_option = kwargs.get('version_upgrade_option', None) + self.model = kwargs["model"] + self.rai_policy_name = kwargs.get("rai_policy_name", None) + self.sku = kwargs.get("sku", None) + self.version_upgrade_option = kwargs.get("version_upgrade_option", None) class CognitiveServicesSku(msrest.serialization.Model): @@ -7146,17 +6765,14 @@ class CognitiveServicesSku(msrest.serialization.Model): """ _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'int'}, - 'family': {'key': 'family', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "capacity": {"key": "capacity", "type": "int"}, + "family": {"key": "family", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "size": 
{"key": "size", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword capacity: :paramtype capacity: int @@ -7170,11 +6786,11 @@ def __init__( :paramtype tier: str """ super(CognitiveServicesSku, self).__init__(**kwargs) - self.capacity = kwargs.get('capacity', None) - self.family = kwargs.get('family', None) - self.name = kwargs.get('name', None) - self.size = kwargs.get('size', None) - self.tier = kwargs.get('tier', None) + self.capacity = kwargs.get("capacity", None) + self.family = kwargs.get("family", None) + self.name = kwargs.get("name", None) + self.size = kwargs.get("size", None) + self.tier = kwargs.get("tier", None) class Collection(msrest.serialization.Model): @@ -7196,16 +6812,13 @@ class Collection(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'data_collection_mode': {'key': 'dataCollectionMode', 'type': 'str'}, - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, + "client_id": {"key": "clientId", "type": "str"}, + "data_collection_mode": {"key": "dataCollectionMode", "type": "str"}, + "data_id": {"key": "dataId", "type": "str"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword client_id: The msi client id used to collect logging to blob storage. If it's null,backend will pick a registered endpoint identity to auth. 
@@ -7222,10 +6835,10 @@ def __init__( :paramtype sampling_rate: float """ super(Collection, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.data_collection_mode = kwargs.get('data_collection_mode', None) - self.data_id = kwargs.get('data_id', None) - self.sampling_rate = kwargs.get('sampling_rate', 1) + self.client_id = kwargs.get("client_id", None) + self.data_collection_mode = kwargs.get("data_collection_mode", None) + self.data_id = kwargs.get("data_id", None) + self.sampling_rate = kwargs.get("sampling_rate", 1) class ColumnTransformer(msrest.serialization.Model): @@ -7239,14 +6852,11 @@ class ColumnTransformer(msrest.serialization.Model): """ _attribute_map = { - 'fields': {'key': 'fields', 'type': '[str]'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, + "fields": {"key": "fields", "type": "[str]"}, + "parameters": {"key": "parameters", "type": "object"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword fields: Fields to apply transformer logic on. 
:paramtype fields: list[str] @@ -7255,8 +6865,8 @@ def __init__( :paramtype parameters: any """ super(ColumnTransformer, self).__init__(**kwargs) - self.fields = kwargs.get('fields', None) - self.parameters = kwargs.get('parameters', None) + self.fields = kwargs.get("fields", None) + self.parameters = kwargs.get("parameters", None) class CommandJob(JobBaseProperties): @@ -7334,46 +6944,43 @@ class CommandJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'parameters': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 
'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'CommandJobLimits'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): + "job_type": {"required": True}, + "status": {"readonly": True}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "parameters": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "AutologgerSettings"}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inputs": {"key": "inputs", "type": 
"{JobInput}"}, + "limits": {"key": "limits", "type": "CommandJobLimits"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "parameters": {"key": "parameters", "type": "object"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + } + + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -7432,19 +7039,19 @@ def __init__( :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ super(CommandJob, self).__init__(**kwargs) - self.job_type = 'Command' # type: str - self.autologger_settings = kwargs.get('autologger_settings', None) - self.code_id = kwargs.get('code_id', None) - self.command = kwargs['command'] - self.distribution = kwargs.get('distribution', None) - self.environment_id = kwargs['environment_id'] - self.environment_variables = kwargs.get('environment_variables', None) - self.inputs = kwargs.get('inputs', None) - self.limits = kwargs.get('limits', None) - self.outputs = kwargs.get('outputs', None) + self.job_type = "Command" # type: str + self.autologger_settings = kwargs.get("autologger_settings", None) + self.code_id = kwargs.get("code_id", None) + self.command = kwargs["command"] + self.distribution = kwargs.get("distribution", None) + self.environment_id = kwargs["environment_id"] + self.environment_variables = kwargs.get("environment_variables", None) + self.inputs = kwargs.get("inputs", None) + self.limits = kwargs.get("limits", None) + self.outputs = kwargs.get("outputs", None) self.parameters = None - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) + self.queue_settings = kwargs.get("queue_settings", None) + self.resources = kwargs.get("resources", None) class JobLimits(msrest.serialization.Model): @@ -7464,22 +7071,17 @@ class JobLimits(msrest.serialization.Model): """ _validation = { - 
'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, } - _subtype_map = { - 'job_limits_type': {'Command': 'CommandJobLimits', 'Sweep': 'SweepJobLimits'} - } + _subtype_map = {"job_limits_type": {"Command": "CommandJobLimits", "Sweep": "SweepJobLimits"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. @@ -7487,7 +7089,7 @@ def __init__( """ super(JobLimits, self).__init__(**kwargs) self.job_limits_type = None # type: Optional[str] - self.timeout = kwargs.get('timeout', None) + self.timeout = kwargs.get("timeout", None) class CommandJobLimits(JobLimits): @@ -7504,25 +7106,22 @@ class CommandJobLimits(JobLimits): """ _validation = { - 'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. 
:paramtype timeout: ~datetime.timedelta """ super(CommandJobLimits, self).__init__(**kwargs) - self.job_limits_type = 'Command' # type: str + self.job_limits_type = "Command" # type: str class ComponentConfiguration(msrest.serialization.Model): @@ -7533,19 +7132,16 @@ class ComponentConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'pipeline_settings': {'key': 'pipelineSettings', 'type': 'object'}, + "pipeline_settings": {"key": "pipelineSettings", "type": "object"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. :paramtype pipeline_settings: any """ super(ComponentConfiguration, self).__init__(**kwargs) - self.pipeline_settings = kwargs.get('pipeline_settings', None) + self.pipeline_settings = kwargs.get("pipeline_settings", None) class ComponentContainer(ProxyResource): @@ -7571,81 +7167,75 @@ class ComponentContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. 
[Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties """ super(ComponentContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class ComponentContainerProperties(AssetContainer): """Component container definition. -.. raw:: html + .. raw:: html - . + . - Variables are only populated by the server, and will be ignored when sending a request. + Variables are only populated by the server, and will be ignored when sending a request. - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the component container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. 
+ :vartype next_version: str + :ivar provisioning_state: Provisioning state for the component container. Possible values + include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -7671,14 +7261,11 @@ class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of ComponentContainer objects. If null, there are no additional pages. @@ -7687,8 +7274,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] """ super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ComponentVersion(ProxyResource): @@ -7714,31 +7301,28 @@ class ComponentVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def 
__init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties """ super(ComponentVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class ComponentVersionProperties(AssetBase): @@ -7778,25 +7362,22 @@ class ComponentVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'component_spec': {'key': 'componentSpec', 'type': 'object'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "component_spec": {"key": "componentSpec", "type": "object"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -7825,9 +7406,9 @@ def __init__( :paramtype stage: str """ super(ComponentVersionProperties, self).__init__(**kwargs) - self.component_spec = kwargs.get('component_spec', None) + self.component_spec = kwargs.get("component_spec", None) self.provisioning_state = None - self.stage = kwargs.get('stage', None) + self.stage = kwargs.get("stage", None) class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): @@ -7841,14 +7422,11 @@ class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of ComponentVersion objects. If null, there are no additional pages. @@ -7857,8 +7435,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] """ super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ComputeInstanceSchema(msrest.serialization.Model): @@ -7869,19 +7447,16 @@ class ComputeInstanceSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of ComputeInstance. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties """ super(ComputeInstanceSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class ComputeInstance(Compute, ComputeInstanceSchema): @@ -7923,32 +7498,29 @@ class ComputeInstance(Compute, ComputeInstanceSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", 
"type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of ComputeInstance. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties @@ -7963,17 +7535,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(ComputeInstance, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'ComputeInstance' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "ComputeInstance" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class ComputeInstanceApplication(msrest.serialization.Model): @@ -7986,14 +7558,11 @@ class ComputeInstanceApplication(msrest.serialization.Model): """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + "display_name": {"key": "displayName", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): 
""" :keyword display_name: Name of the ComputeInstance application. :paramtype display_name: str @@ -8001,8 +7570,8 @@ def __init__( :paramtype endpoint_uri: str """ super(ComputeInstanceApplication, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.endpoint_uri = kwargs.get('endpoint_uri', None) + self.display_name = kwargs.get("display_name", None) + self.endpoint_uri = kwargs.get("endpoint_uri", None) class ComputeInstanceAutologgerSettings(msrest.serialization.Model): @@ -8014,13 +7583,10 @@ class ComputeInstanceAutologgerSettings(msrest.serialization.Model): """ _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Possible values include: "Enabled", "Disabled". @@ -8028,7 +7594,7 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MlflowAutologger """ super(ComputeInstanceAutologgerSettings, self).__init__(**kwargs) - self.mlflow_autologger = kwargs.get('mlflow_autologger', None) + self.mlflow_autologger = kwargs.get("mlflow_autologger", None) class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): @@ -8044,21 +7610,17 @@ class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): """ _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, + "public_ip_address": {"readonly": True}, + "private_ip_address": {"readonly": True}, } _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def 
__init__(self, **kwargs): + """ """ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) self.public_ip_address = None self.private_ip_address = None @@ -8084,22 +7646,19 @@ class ComputeInstanceContainer(msrest.serialization.Model): """ _validation = { - 'services': {'readonly': True}, + "services": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'autosave': {'key': 'autosave', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'network': {'key': 'network', 'type': 'str'}, - 'environment': {'key': 'environment', 'type': 'ComputeInstanceEnvironmentInfo'}, - 'services': {'key': 'services', 'type': '[object]'}, + "name": {"key": "name", "type": "str"}, + "autosave": {"key": "autosave", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "network": {"key": "network", "type": "str"}, + "environment": {"key": "environment", "type": "ComputeInstanceEnvironmentInfo"}, + "services": {"key": "services", "type": "[object]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: Name of the ComputeInstance container. 
:paramtype name: str @@ -8114,11 +7673,11 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo """ super(ComputeInstanceContainer, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.autosave = kwargs.get('autosave', None) - self.gpu = kwargs.get('gpu', None) - self.network = kwargs.get('network', None) - self.environment = kwargs.get('environment', None) + self.name = kwargs.get("name", None) + self.autosave = kwargs.get("autosave", None) + self.gpu = kwargs.get("gpu", None) + self.network = kwargs.get("network", None) + self.environment = kwargs.get("environment", None) self.services = None @@ -8136,23 +7695,19 @@ class ComputeInstanceCreatedBy(msrest.serialization.Model): """ _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, + "user_name": {"readonly": True}, + "user_org_id": {"readonly": True}, + "user_id": {"readonly": True}, } _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, + "user_name": {"key": "userName", "type": "str"}, + "user_org_id": {"key": "userOrgId", "type": "str"}, + "user_id": {"key": "userId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ComputeInstanceCreatedBy, self).__init__(**kwargs) self.user_name = None self.user_org_id = None @@ -8177,16 +7732,13 @@ class ComputeInstanceDataDisk(msrest.serialization.Model): """ _attribute_map = { - 'caching': {'key': 'caching', 'type': 'str'}, - 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, - 'lun': {'key': 'lun', 'type': 'int'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, + "caching": {"key": "caching", "type": "str"}, + "disk_size_gb": {"key": "diskSizeGB", "type": "int"}, + "lun": {"key": "lun", "type": "int"}, + "storage_account_type": {"key": 
"storageAccountType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword caching: Caching type of Data Disk. Possible values include: "None", "ReadOnly", "ReadWrite". @@ -8202,10 +7754,10 @@ def __init__( ~azure.mgmt.machinelearningservices.models.StorageAccountType """ super(ComputeInstanceDataDisk, self).__init__(**kwargs) - self.caching = kwargs.get('caching', None) - self.disk_size_gb = kwargs.get('disk_size_gb', None) - self.lun = kwargs.get('lun', None) - self.storage_account_type = kwargs.get('storage_account_type', "Standard_LRS") + self.caching = kwargs.get("caching", None) + self.disk_size_gb = kwargs.get("disk_size_gb", None) + self.lun = kwargs.get("lun", None) + self.storage_account_type = kwargs.get("storage_account_type", "Standard_LRS") class ComputeInstanceDataMount(msrest.serialization.Model): @@ -8235,22 +7787,19 @@ class ComputeInstanceDataMount(msrest.serialization.Model): """ _attribute_map = { - 'source': {'key': 'source', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'mount_name': {'key': 'mountName', 'type': 'str'}, - 'mount_action': {'key': 'mountAction', 'type': 'str'}, - 'mount_mode': {'key': 'mountMode', 'type': 'str'}, - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'mount_state': {'key': 'mountState', 'type': 'str'}, - 'mounted_on': {'key': 'mountedOn', 'type': 'iso-8601'}, - 'error': {'key': 'error', 'type': 'str'}, + "source": {"key": "source", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "mount_name": {"key": "mountName", "type": "str"}, + "mount_action": {"key": "mountAction", "type": "str"}, + "mount_mode": {"key": "mountMode", "type": "str"}, + "created_by": {"key": "createdBy", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "mount_state": {"key": "mountState", "type": "str"}, + "mounted_on": {"key": "mountedOn", "type": "iso-8601"}, + 
"error": {"key": "error", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword source: Source of the ComputeInstance data mount. :paramtype source: str @@ -8275,16 +7824,16 @@ def __init__( :paramtype error: str """ super(ComputeInstanceDataMount, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.source_type = kwargs.get('source_type', None) - self.mount_name = kwargs.get('mount_name', None) - self.mount_action = kwargs.get('mount_action', None) - self.mount_mode = kwargs.get('mount_mode', None) - self.created_by = kwargs.get('created_by', None) - self.mount_path = kwargs.get('mount_path', None) - self.mount_state = kwargs.get('mount_state', None) - self.mounted_on = kwargs.get('mounted_on', None) - self.error = kwargs.get('error', None) + self.source = kwargs.get("source", None) + self.source_type = kwargs.get("source_type", None) + self.mount_name = kwargs.get("mount_name", None) + self.mount_action = kwargs.get("mount_action", None) + self.mount_mode = kwargs.get("mount_mode", None) + self.created_by = kwargs.get("created_by", None) + self.mount_path = kwargs.get("mount_path", None) + self.mount_state = kwargs.get("mount_state", None) + self.mounted_on = kwargs.get("mounted_on", None) + self.error = kwargs.get("error", None) class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): @@ -8297,14 +7846,11 @@ class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: name of environment. 
:paramtype name: str @@ -8312,8 +7858,8 @@ def __init__( :paramtype version: str """ super(ComputeInstanceEnvironmentInfo, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.version = kwargs.get('version', None) + self.name = kwargs.get("name", None) + self.version = kwargs.get("version", None) class ComputeInstanceLastOperation(msrest.serialization.Model): @@ -8334,16 +7880,13 @@ class ComputeInstanceLastOperation(msrest.serialization.Model): """ _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, - 'operation_trigger': {'key': 'operationTrigger', 'type': 'str'}, + "operation_name": {"key": "operationName", "type": "str"}, + "operation_time": {"key": "operationTime", "type": "iso-8601"}, + "operation_status": {"key": "operationStatus", "type": "str"}, + "operation_trigger": {"key": "operationTrigger", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword operation_name: Name of the last operation. Possible values include: "Create", "Start", "Stop", "Restart", "Resize", "Reimage", "Delete". 
@@ -8360,10 +7903,10 @@ def __init__( ~azure.mgmt.machinelearningservices.models.OperationTrigger """ super(ComputeInstanceLastOperation, self).__init__(**kwargs) - self.operation_name = kwargs.get('operation_name', None) - self.operation_time = kwargs.get('operation_time', None) - self.operation_status = kwargs.get('operation_status', None) - self.operation_trigger = kwargs.get('operation_trigger', None) + self.operation_name = kwargs.get("operation_name", None) + self.operation_time = kwargs.get("operation_time", None) + self.operation_status = kwargs.get("operation_status", None) + self.operation_trigger = kwargs.get("operation_trigger", None) class ComputeInstanceProperties(msrest.serialization.Model): @@ -8452,54 +7995,53 @@ class ComputeInstanceProperties(msrest.serialization.Model): """ _validation = { - 'os_image_metadata': {'readonly': True}, - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, - 'containers': {'readonly': True}, - 'data_disks': {'readonly': True}, - 'data_mounts': {'readonly': True}, - 'versions': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'ComputeInstanceAutologgerSettings'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'custom_services': {'key': 'customServices', 'type': '[CustomService]'}, - 'os_image_metadata': {'key': 'osImageMetadata', 'type': 'ImageMetadata'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 
'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'}, - 'enable_os_patching': {'key': 'enableOSPatching', 'type': 'bool'}, - 'enable_root_access': {'key': 'enableRootAccess', 'type': 'bool'}, - 'enable_sso': {'key': 'enableSSO', 'type': 'bool'}, - 'release_quota_on_stop': {'key': 'releaseQuotaOnStop', 'type': 'bool'}, - 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', - 'type': 'PersonalComputeInstanceSettings'}, - 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, - 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'}, - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'containers': {'key': 'containers', 'type': '[ComputeInstanceContainer]'}, - 'data_disks': {'key': 'dataDisks', 'type': '[ComputeInstanceDataDisk]'}, - 'data_mounts': {'key': 'dataMounts', 'type': '[ComputeInstanceDataMount]'}, - 'versions': {'key': 'versions', 'type': 'ComputeInstanceVersion'}, - } - - def __init__( - self, - **kwargs - ): + "os_image_metadata": {"readonly": True}, + "connectivity_endpoints": {"readonly": True}, + "applications": {"readonly": True}, + "created_by": {"readonly": True}, + "errors": {"readonly": True}, + "state": {"readonly": True}, + "last_operation": {"readonly": True}, + "containers": {"readonly": True}, + "data_disks": {"readonly": True}, + "data_mounts": {"readonly": True}, + "versions": {"readonly": True}, + } + + _attribute_map = { + "vm_size": {"key": "vmSize", "type": "str"}, + "subnet": {"key": "subnet", "type": "ResourceId"}, + "application_sharing_policy": {"key": "applicationSharingPolicy", "type": "str"}, + 
"autologger_settings": {"key": "autologgerSettings", "type": "ComputeInstanceAutologgerSettings"}, + "ssh_settings": {"key": "sshSettings", "type": "ComputeInstanceSshSettings"}, + "custom_services": {"key": "customServices", "type": "[CustomService]"}, + "os_image_metadata": {"key": "osImageMetadata", "type": "ImageMetadata"}, + "connectivity_endpoints": {"key": "connectivityEndpoints", "type": "ComputeInstanceConnectivityEndpoints"}, + "applications": {"key": "applications", "type": "[ComputeInstanceApplication]"}, + "created_by": {"key": "createdBy", "type": "ComputeInstanceCreatedBy"}, + "errors": {"key": "errors", "type": "[ErrorResponse]"}, + "state": {"key": "state", "type": "str"}, + "compute_instance_authorization_type": {"key": "computeInstanceAuthorizationType", "type": "str"}, + "enable_os_patching": {"key": "enableOSPatching", "type": "bool"}, + "enable_root_access": {"key": "enableRootAccess", "type": "bool"}, + "enable_sso": {"key": "enableSSO", "type": "bool"}, + "release_quota_on_stop": {"key": "releaseQuotaOnStop", "type": "bool"}, + "personal_compute_instance_settings": { + "key": "personalComputeInstanceSettings", + "type": "PersonalComputeInstanceSettings", + }, + "setup_scripts": {"key": "setupScripts", "type": "SetupScripts"}, + "last_operation": {"key": "lastOperation", "type": "ComputeInstanceLastOperation"}, + "schedules": {"key": "schedules", "type": "ComputeSchedules"}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "containers": {"key": "containers", "type": "[ComputeInstanceContainer]"}, + "data_disks": {"key": "dataDisks", "type": "[ComputeInstanceDataDisk]"}, + "data_mounts": {"key": "dataMounts", "type": "[ComputeInstanceDataMount]"}, + "versions": {"key": "versions", "type": "ComputeInstanceVersion"}, + } + + def __init__(self, **kwargs): """ :keyword vm_size: Virtual Machine Size. 
:paramtype vm_size: str @@ -8551,29 +8093,29 @@ def __init__( :paramtype enable_node_public_ip: bool """ super(ComputeInstanceProperties, self).__init__(**kwargs) - self.vm_size = kwargs.get('vm_size', None) - self.subnet = kwargs.get('subnet', None) - self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared") - self.autologger_settings = kwargs.get('autologger_settings', None) - self.ssh_settings = kwargs.get('ssh_settings', None) - self.custom_services = kwargs.get('custom_services', None) + self.vm_size = kwargs.get("vm_size", None) + self.subnet = kwargs.get("subnet", None) + self.application_sharing_policy = kwargs.get("application_sharing_policy", "Shared") + self.autologger_settings = kwargs.get("autologger_settings", None) + self.ssh_settings = kwargs.get("ssh_settings", None) + self.custom_services = kwargs.get("custom_services", None) self.os_image_metadata = None self.connectivity_endpoints = None self.applications = None self.created_by = None self.errors = None self.state = None - self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal") - self.enable_os_patching = kwargs.get('enable_os_patching', False) - self.enable_root_access = kwargs.get('enable_root_access', True) - self.enable_sso = kwargs.get('enable_sso', True) - self.release_quota_on_stop = kwargs.get('release_quota_on_stop', False) - self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None) - self.setup_scripts = kwargs.get('setup_scripts', None) + self.compute_instance_authorization_type = kwargs.get("compute_instance_authorization_type", "personal") + self.enable_os_patching = kwargs.get("enable_os_patching", False) + self.enable_root_access = kwargs.get("enable_root_access", True) + self.enable_sso = kwargs.get("enable_sso", True) + self.release_quota_on_stop = kwargs.get("release_quota_on_stop", False) + self.personal_compute_instance_settings = 
kwargs.get("personal_compute_instance_settings", None) + self.setup_scripts = kwargs.get("setup_scripts", None) self.last_operation = None - self.schedules = kwargs.get('schedules', None) - self.idle_time_before_shutdown = kwargs.get('idle_time_before_shutdown', None) - self.enable_node_public_ip = kwargs.get('enable_node_public_ip', None) + self.schedules = kwargs.get("schedules", None) + self.idle_time_before_shutdown = kwargs.get("idle_time_before_shutdown", None) + self.enable_node_public_ip = kwargs.get("enable_node_public_ip", None) self.containers = None self.data_disks = None self.data_mounts = None @@ -8600,21 +8142,18 @@ class ComputeInstanceSshSettings(msrest.serialization.Model): """ _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, + "admin_user_name": {"readonly": True}, + "ssh_port": {"readonly": True}, } _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + "ssh_public_access": {"key": "sshPublicAccess", "type": "str"}, + "admin_user_name": {"key": "adminUserName", "type": "str"}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "admin_public_key": {"key": "adminPublicKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword ssh_public_access: State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on this instance. 
Enabled - Indicates that the @@ -8626,10 +8165,10 @@ def __init__( :paramtype admin_public_key: str """ super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") + self.ssh_public_access = kwargs.get("ssh_public_access", "Disabled") self.admin_user_name = None self.ssh_port = None - self.admin_public_key = kwargs.get('admin_public_key', None) + self.admin_public_key = kwargs.get("admin_public_key", None) class ComputeInstanceVersion(msrest.serialization.Model): @@ -8640,19 +8179,16 @@ class ComputeInstanceVersion(msrest.serialization.Model): """ _attribute_map = { - 'runtime': {'key': 'runtime', 'type': 'str'}, + "runtime": {"key": "runtime", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword runtime: Runtime of compute instance. :paramtype runtime: str """ super(ComputeInstanceVersion, self).__init__(**kwargs) - self.runtime = kwargs.get('runtime', None) + self.runtime = kwargs.get("runtime", None) class ComputeRecurrenceSchedule(msrest.serialization.Model): @@ -8671,21 +8207,18 @@ class ComputeRecurrenceSchedule(msrest.serialization.Model): """ _validation = { - 'hours': {'required': True}, - 'minutes': {'required': True}, + "hours": {"required": True}, + "minutes": {"required": True}, } _attribute_map = { - 'hours': {'key': 'hours', 'type': '[int]'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, + "hours": {"key": "hours", "type": "[int]"}, + "minutes": {"key": "minutes", "type": "[int]"}, + "month_days": {"key": "monthDays", "type": "[int]"}, + "week_days": {"key": "weekDays", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword hours: Required. [Required] List of hours for the schedule. 
:paramtype hours: list[int] @@ -8697,10 +8230,10 @@ def __init__( :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.ComputeWeekDay] """ super(ComputeRecurrenceSchedule, self).__init__(**kwargs) - self.hours = kwargs['hours'] - self.minutes = kwargs['minutes'] - self.month_days = kwargs.get('month_days', None) - self.week_days = kwargs.get('week_days', None) + self.hours = kwargs["hours"] + self.minutes = kwargs["minutes"] + self.month_days = kwargs.get("month_days", None) + self.week_days = kwargs.get("week_days", None) class ComputeResourceSchema(msrest.serialization.Model): @@ -8711,19 +8244,16 @@ class ComputeResourceSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, + "properties": {"key": "properties", "type": "Compute"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute """ super(ComputeResourceSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class ComputeResource(Resource, ComputeResourceSchema): @@ -8755,28 +8285,25 @@ class ComputeResource(Resource, ComputeResourceSchema): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 
'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "properties": {"key": "properties", "type": "Compute"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute @@ -8790,11 +8317,11 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(ComputeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) + self.properties = kwargs.get("properties", None) + self.identity = kwargs.get("identity", None) + self.location = kwargs.get("location", None) + self.tags = kwargs.get("tags", None) + self.sku = kwargs.get("sku", None) self.id = None self.name = None self.type = None @@ -8809,19 +8336,16 @@ class ComputeRuntimeDto(msrest.serialization.Model): """ _attribute_map = { - 'spark_runtime_version': {'key': 'sparkRuntimeVersion', 'type': 'str'}, + "spark_runtime_version": {"key": "sparkRuntimeVersion", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword spark_runtime_version: :paramtype spark_runtime_version: str """ super(ComputeRuntimeDto, self).__init__(**kwargs) - self.spark_runtime_version = kwargs.get('spark_runtime_version', None) + self.spark_runtime_version = kwargs.get("spark_runtime_version", None) class ComputeSchedules(msrest.serialization.Model): @@ -8833,20 
+8357,17 @@ class ComputeSchedules(msrest.serialization.Model): """ _attribute_map = { - 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, + "compute_start_stop": {"key": "computeStartStop", "type": "[ComputeStartStopSchedule]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword compute_start_stop: The list of compute start stop schedules to be applied. :paramtype compute_start_stop: list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] """ super(ComputeSchedules, self).__init__(**kwargs) - self.compute_start_stop = kwargs.get('compute_start_stop', None) + self.compute_start_stop = kwargs.get("compute_start_stop", None) class ComputeStartStopSchedule(msrest.serialization.Model): @@ -8877,25 +8398,22 @@ class ComputeStartStopSchedule(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'provisioning_status': {'readonly': True}, + "id": {"readonly": True}, + "provisioning_status": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'action': {'key': 'action', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'}, - 'cron': {'key': 'cron', 'type': 'Cron'}, - 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "action": {"key": "action", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "recurrence": {"key": "recurrence", "type": "Recurrence"}, + "cron": {"key": "cron", "type": "Cron"}, + "schedule": {"key": "schedule", "type": "ScheduleBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword status: Is the 
schedule enabled or disabled?. Possible values include: "Enabled", "Disabled". @@ -8915,12 +8433,12 @@ def __init__( super(ComputeStartStopSchedule, self).__init__(**kwargs) self.id = None self.provisioning_status = None - self.status = kwargs.get('status', None) - self.action = kwargs.get('action', None) - self.trigger_type = kwargs.get('trigger_type', None) - self.recurrence = kwargs.get('recurrence', None) - self.cron = kwargs.get('cron', None) - self.schedule = kwargs.get('schedule', None) + self.status = kwargs.get("status", None) + self.action = kwargs.get("action", None) + self.trigger_type = kwargs.get("trigger_type", None) + self.recurrence = kwargs.get("recurrence", None) + self.cron = kwargs.get("cron", None) + self.schedule = kwargs.get("schedule", None) class ContainerResourceRequirements(msrest.serialization.Model): @@ -8935,14 +8453,11 @@ class ContainerResourceRequirements(msrest.serialization.Model): """ _attribute_map = { - 'container_resource_limits': {'key': 'containerResourceLimits', 'type': 'ContainerResourceSettings'}, - 'container_resource_requests': {'key': 'containerResourceRequests', 'type': 'ContainerResourceSettings'}, + "container_resource_limits": {"key": "containerResourceLimits", "type": "ContainerResourceSettings"}, + "container_resource_requests": {"key": "containerResourceRequests", "type": "ContainerResourceSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword container_resource_limits: Container resource limit info:. 
:paramtype container_resource_limits: @@ -8952,8 +8467,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings """ super(ContainerResourceRequirements, self).__init__(**kwargs) - self.container_resource_limits = kwargs.get('container_resource_limits', None) - self.container_resource_requests = kwargs.get('container_resource_requests', None) + self.container_resource_limits = kwargs.get("container_resource_limits", None) + self.container_resource_requests = kwargs.get("container_resource_requests", None) class ContainerResourceSettings(msrest.serialization.Model): @@ -8971,15 +8486,12 @@ class ContainerResourceSettings(msrest.serialization.Model): """ _attribute_map = { - 'cpu': {'key': 'cpu', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'memory': {'key': 'memory', 'type': 'str'}, + "cpu": {"key": "cpu", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cpu: Number of vCPUs request/limit for container. More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. 
@@ -8992,9 +8504,9 @@ def __init__( :paramtype memory: str """ super(ContainerResourceSettings, self).__init__(**kwargs) - self.cpu = kwargs.get('cpu', None) - self.gpu = kwargs.get('gpu', None) - self.memory = kwargs.get('memory', None) + self.cpu = kwargs.get("cpu", None) + self.gpu = kwargs.get("gpu", None) + self.memory = kwargs.get("memory", None) class EndpointDeploymentResourceProperties(msrest.serialization.Model): @@ -9018,39 +8530,39 @@ class EndpointDeploymentResourceProperties(msrest.serialization.Model): """ _validation = { - 'provisioning_state': {'readonly': True}, - 'type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9._]'}, + "provisioning_state": {"readonly": True}, + "type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9._]"}, } _attribute_map = { - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'Azure.ContentSafety': 'ContentSafetyEndpointDeploymentResourceProperties', - 'Azure.OpenAI': 'OpenAIEndpointDeploymentResourceProperties', - 'Azure.Speech': 'SpeechEndpointDeploymentResourceProperties', - 'managedOnlineEndpoint': 'ManagedOnlineEndpointDeploymentResourceProperties'} + "type": { + "Azure.ContentSafety": "ContentSafetyEndpointDeploymentResourceProperties", + "Azure.OpenAI": "OpenAIEndpointDeploymentResourceProperties", + "Azure.Speech": "SpeechEndpointDeploymentResourceProperties", + "managedOnlineEndpoint": "ManagedOnlineEndpointDeploymentResourceProperties", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword failure_reason: The failure reason if the creation failed. 
:paramtype failure_reason: str """ super(EndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.failure_reason = kwargs.get('failure_reason', None) + self.failure_reason = kwargs.get("failure_reason", None) self.provisioning_state = None self.type = None # type: Optional[str] -class ContentSafetyEndpointDeploymentResourceProperties(EndpointDeploymentResourceProperties, - CognitiveServiceEndpointDeploymentResourceProperties): +class ContentSafetyEndpointDeploymentResourceProperties( + EndpointDeploymentResourceProperties, CognitiveServiceEndpointDeploymentResourceProperties +): """ContentSafetyEndpointDeploymentResourceProperties. Variables are only populated by the server, and will be ignored when sending a request. @@ -9078,25 +8590,22 @@ class ContentSafetyEndpointDeploymentResourceProperties(EndpointDeploymentResour """ _validation = { - 'model': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9._]'}, + "model": {"required": True}, + "provisioning_state": {"readonly": True}, + "type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9._]"}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'EndpointDeploymentModel'}, - 'rai_policy_name': {'key': 'raiPolicyName', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'CognitiveServicesSku'}, - 'version_upgrade_option': {'key': 'versionUpgradeOption', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "model": {"key": "model", "type": "EndpointDeploymentModel"}, + "rai_policy_name": {"key": "raiPolicyName", "type": "str"}, + "sku": {"key": "sku", "type": "CognitiveServicesSku"}, + "version_upgrade_option": {"key": "versionUpgradeOption", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "provisioning_state": {"key": "provisioningState", 
"type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. Model used for the endpoint deployment. :paramtype model: ~azure.mgmt.machinelearningservices.models.EndpointDeploymentModel @@ -9112,12 +8621,12 @@ def __init__( :paramtype failure_reason: str """ super(ContentSafetyEndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.model = kwargs['model'] - self.rai_policy_name = kwargs.get('rai_policy_name', None) - self.sku = kwargs.get('sku', None) - self.version_upgrade_option = kwargs.get('version_upgrade_option', None) - self.type = 'Azure.ContentSafety' # type: str - self.failure_reason = kwargs.get('failure_reason', None) + self.model = kwargs["model"] + self.rai_policy_name = kwargs.get("rai_policy_name", None) + self.sku = kwargs.get("sku", None) + self.version_upgrade_option = kwargs.get("version_upgrade_option", None) + self.type = "Azure.ContentSafety" # type: str + self.failure_reason = kwargs.get("failure_reason", None) self.provisioning_state = None @@ -9151,30 +8660,29 @@ class EndpointResourceProperties(msrest.serialization.Model): """ _validation = { - 'endpoint_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'associated_resource_id': {'key': 'associatedResourceId', 'type': 'str'}, - 'endpoint_type': {'key': 'endpointType', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "associated_resource_id": {"key": "associatedResourceId", "type": "str"}, + "endpoint_type": {"key": "endpointType", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": 
"str"}, + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } _subtype_map = { - 'endpoint_type': {'Azure.ContentSafety': 'ContentSafetyEndpointResourceProperties', - 'Azure.OpenAI': 'OpenAIEndpointResourceProperties', - 'Azure.Speech': 'SpeechEndpointResourceProperties', - 'managedOnlineEndpoint': 'ManagedOnlineEndpointResourceProperties'} + "endpoint_type": { + "Azure.ContentSafety": "ContentSafetyEndpointResourceProperties", + "Azure.OpenAI": "OpenAIEndpointResourceProperties", + "Azure.Speech": "SpeechEndpointResourceProperties", + "managedOnlineEndpoint": "ManagedOnlineEndpointResourceProperties", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword associated_resource_id: Byo resource id for creating the built-in model service endpoints. @@ -9187,11 +8695,11 @@ def __init__( :paramtype name: str """ super(EndpointResourceProperties, self).__init__(**kwargs) - self.associated_resource_id = kwargs.get('associated_resource_id', None) + self.associated_resource_id = kwargs.get("associated_resource_id", None) self.endpoint_type = None # type: Optional[str] - self.endpoint_uri = kwargs.get('endpoint_uri', None) - self.failure_reason = kwargs.get('failure_reason', None) - self.name = kwargs.get('name', None) + self.endpoint_uri = kwargs.get("endpoint_uri", None) + self.failure_reason = kwargs.get("failure_reason", None) + self.name = kwargs.get("name", None) self.provisioning_state = None @@ -9222,23 +8730,20 @@ class ContentSafetyEndpointResourceProperties(EndpointResourceProperties): """ _validation = { - 'endpoint_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'associated_resource_id': {'key': 'associatedResourceId', 'type': 'str'}, - 'endpoint_type': {'key': 'endpointType', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 
'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "associated_resource_id": {"key": "associatedResourceId", "type": "str"}, + "endpoint_type": {"key": "endpointType", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword associated_resource_id: Byo resource id for creating the built-in model service endpoints. @@ -9251,7 +8756,7 @@ def __init__( :paramtype name: str """ super(ContentSafetyEndpointResourceProperties, self).__init__(**kwargs) - self.endpoint_type = 'Azure.ContentSafety' # type: str + self.endpoint_type = "Azure.ContentSafety" # type: str class CosmosDbSettings(msrest.serialization.Model): @@ -9262,19 +8767,16 @@ class CosmosDbSettings(msrest.serialization.Model): """ _attribute_map = { - 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, + "collections_throughput": {"key": "collectionsThroughput", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword collections_throughput: :paramtype collections_throughput: int """ super(CosmosDbSettings, self).__init__(**kwargs) - self.collections_throughput = kwargs.get('collections_throughput', None) + self.collections_throughput = kwargs.get("collections_throughput", None) class ScheduleActionBase(msrest.serialization.Model): @@ -9292,24 +8794,24 @@ class ScheduleActionBase(msrest.serialization.Model): """ _validation = { - 'action_type': {'required': True}, + "action_type": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, + "action_type": {"key": "actionType", "type": "str"}, } _subtype_map 
= { - 'action_type': {'CreateJob': 'JobScheduleAction', 'CreateMonitor': 'CreateMonitorAction', - 'ImportData': 'ImportDataAction', 'InvokeBatchEndpoint': 'EndpointScheduleAction'} + "action_type": { + "CreateJob": "JobScheduleAction", + "CreateMonitor": "CreateMonitorAction", + "ImportData": "ImportDataAction", + "InvokeBatchEndpoint": "EndpointScheduleAction", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ScheduleActionBase, self).__init__(**kwargs) self.action_type = None # type: Optional[str] @@ -9328,26 +8830,23 @@ class CreateMonitorAction(ScheduleActionBase): """ _validation = { - 'action_type': {'required': True}, - 'monitor_definition': {'required': True}, + "action_type": {"required": True}, + "monitor_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'monitor_definition': {'key': 'monitorDefinition', 'type': 'MonitorDefinition'}, + "action_type": {"key": "actionType", "type": "str"}, + "monitor_definition": {"key": "monitorDefinition", "type": "MonitorDefinition"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword monitor_definition: Required. [Required] Defines the monitor. 
:paramtype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition """ super(CreateMonitorAction, self).__init__(**kwargs) - self.action_type = 'CreateMonitor' # type: str - self.monitor_definition = kwargs['monitor_definition'] + self.action_type = "CreateMonitor" # type: str + self.monitor_definition = kwargs["monitor_definition"] class Cron(msrest.serialization.Model): @@ -9365,15 +8864,12 @@ class Cron(msrest.serialization.Model): """ _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. :paramtype start_time: str @@ -9386,9 +8882,9 @@ def __init__( :paramtype expression: str """ super(Cron, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") - self.expression = kwargs.get('expression', None) + self.start_time = kwargs.get("start_time", None) + self.time_zone = kwargs.get("time_zone", "UTC") + self.expression = kwargs.get("expression", None) class TriggerBase(msrest.serialization.Model): @@ -9417,24 +8913,19 @@ class TriggerBase(msrest.serialization.Model): """ _validation = { - 'trigger_type': {'required': True}, + "trigger_type": {"required": True}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": 
{"key": "triggerType", "type": "str"}, } - _subtype_map = { - 'trigger_type': {'Cron': 'CronTrigger', 'Recurrence': 'RecurrenceTrigger'} - } + _subtype_map = {"trigger_type": {"Cron": "CronTrigger", "Recurrence": "RecurrenceTrigger"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. @@ -9450,9 +8941,9 @@ def __init__( :paramtype time_zone: str """ super(TriggerBase, self).__init__(**kwargs) - self.end_time = kwargs.get('end_time', None) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") + self.end_time = kwargs.get("end_time", None) + self.start_time = kwargs.get("start_time", None) + self.time_zone = kwargs.get("time_zone", "UTC") self.trigger_type = None # type: Optional[str] @@ -9482,22 +8973,19 @@ class CronTrigger(TriggerBase): """ _validation = { - 'trigger_type': {'required': True}, - 'expression': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "trigger_type": {"required": True}, + "expression": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. 
@@ -9516,8 +9004,8 @@ def __init__( :paramtype expression: str """ super(CronTrigger, self).__init__(**kwargs) - self.trigger_type = 'Cron' # type: str - self.expression = kwargs['expression'] + self.trigger_type = "Cron" # type: str + self.expression = kwargs["expression"] class CsvExportSummary(ExportSummary): @@ -9545,33 +9033,29 @@ class CsvExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "long"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(CsvExportSummary, self).__init__(**kwargs) - self.format = 'CSV' # type: 
str + self.format = "CSV" # type: str self.container_name = None self.snapshot_path = None @@ -9589,26 +9073,23 @@ class CustomForecastHorizon(ForecastHorizon): """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: Required. [Required] Forecast horizon value. :paramtype value: int """ super(CustomForecastHorizon, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] + self.mode = "Custom" # type: str + self.value = kwargs["value"] class CustomInferencingServer(InferencingServer): @@ -9625,26 +9106,23 @@ class CustomInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword inference_configuration: Inference configuration for custom inferencing. 
:paramtype inference_configuration: ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ super(CustomInferencingServer, self).__init__(**kwargs) - self.server_type = 'Custom' # type: str - self.inference_configuration = kwargs.get('inference_configuration', None) + self.server_type = "Custom" # type: str + self.inference_configuration = kwargs.get("inference_configuration", None) class CustomKeys(msrest.serialization.Model): @@ -9655,96 +9133,90 @@ class CustomKeys(msrest.serialization.Model): """ _attribute_map = { - 'keys': {'key': 'keys', 'type': '{str}'}, + "keys": {"key": "keys", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword keys: Dictionary of :code:``. :paramtype keys: dict[str, str] """ super(CustomKeys, self).__init__(**kwargs) - self.keys = kwargs.get('keys', None) + self.keys = kwargs.get("keys", None) class CustomKeysWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """Category:= CustomKeys -AuthType:= CustomKeys (as type discriminator) -Credentials:= {CustomKeys} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys -Target:= {any value} -Use Metadata property bag for ApiVersion and other metadata fields. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "AccountKey", "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys", "OAuth2", "AAD". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. 
Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", - "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", - "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", - "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", - "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", - "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", - "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", - "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", - "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", - "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", - "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", - "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", - "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", - "WebTable", "Xero", "Zoho", "GenericContainerRegistry". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar created_by_workspace_arm_id: - :vartype created_by_workspace_arm_id: str - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar group: Group based on connection category. Possible values include: "Azure", "AzureAI", - "Database", "NoSQL", "File", "GenericProtocol", "ServicesAndApps". 
- :vartype group: str or ~azure.mgmt.machinelearningservices.models.ConnectionGroup - :ivar is_shared_to_all: - :vartype is_shared_to_all: bool - :ivar metadata: Any object. - :vartype metadata: any - :ivar shared_user_list: - :vartype shared_user_list: list[str] - :ivar target: - :vartype target: str - :ivar credentials: Custom Keys credential object. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys - """ + AuthType:= CustomKeys (as type discriminator) + Credentials:= {CustomKeys} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys + Target:= {any value} + Use Metadata property bag for ApiVersion and other metadata fields. - _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, - } + Variables are only populated by the server, and will be ignored when sending a request. - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'CustomKeys'}, - } + All required parameters must be populated in order to send to Azure. - def __init__( - self, - **kwargs - ): + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", + "AccountKey", "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys", "OAuth2", "AAD". 
+ :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Possible values include: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", "CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", + "CosmosDbMongoDbApi", "AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", + "AzureSqlMi", "AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", + "AmazonRedshift", "Db2", "Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", + "Informix", "MariaDb", "MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", + "PostgreSql", "Presto", "SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", + "Sybase", "Teradata", "Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", + "AmazonS3Compatible", "FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", + "OracleCloudStorage", "Sftp", "GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", + "Concur", "Dynamics", "DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", + "Magento", "Marketo", "Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", + "QuickBooks", "Salesforce", "SalesforceServiceCloud", "SalesforceMarketingCloud", + "SapCloudForCustomer", "SapEcc", "ServiceNow", "SharePointOnlineList", "Shopify", "Square", + "WebTable", "Xero", "Zoho", "GenericContainerRegistry". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: + :vartype created_by_workspace_arm_id: str + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar group: Group based on connection category. Possible values include: "Azure", "AzureAI", + "Database", "NoSQL", "File", "GenericProtocol", "ServicesAndApps". 
+ :vartype group: str or ~azure.mgmt.machinelearningservices.models.ConnectionGroup + :ivar is_shared_to_all: + :vartype is_shared_to_all: bool + :ivar metadata: Any object. + :vartype metadata: any + :ivar shared_user_list: + :vartype shared_user_list: list[str] + :ivar target: + :vartype target: str + :ivar credentials: Custom Keys credential object. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys + """ + + _validation = { + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "CustomKeys"}, + } + + def __init__(self, **kwargs): """ :keyword category: Category of the connection. 
Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -9778,8 +9250,8 @@ def __init__( :paramtype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys """ super(CustomKeysWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'CustomKeys' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "CustomKeys" # type: str + self.credentials = kwargs.get("credentials", None) class CustomMetricThreshold(msrest.serialization.Model): @@ -9795,18 +9267,15 @@ class CustomMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword metric: Required. [Required] The user-defined metric to calculate. 
:paramtype metric: str @@ -9815,8 +9284,8 @@ def __init__( :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ super(CustomMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) + self.metric = kwargs["metric"] + self.threshold = kwargs.get("threshold", None) class CustomModelFineTuning(FineTuningVertical): @@ -9843,25 +9312,22 @@ class CustomModelFineTuning(FineTuningVertical): """ _validation = { - 'model': {'required': True}, - 'model_provider': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "model": {"required": True}, + "model_provider": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'MLFlowModelJobInput'}, - 'model_provider': {'key': 'modelProvider', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'JobInput'}, - 'validation_data': {'key': 'validationData', 'type': 'JobInput'}, - 'hyper_parameters': {'key': 'hyperParameters', 'type': '{str}'}, + "model": {"key": "model", "type": "MLFlowModelJobInput"}, + "model_provider": {"key": "modelProvider", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "JobInput"}, + "validation_data": {"key": "validationData", "type": "JobInput"}, + "hyper_parameters": {"key": "hyperParameters", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. [Required] Input model for fine tuning. 
:paramtype model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput @@ -9878,8 +9344,8 @@ def __init__( :paramtype hyper_parameters: dict[str, str] """ super(CustomModelFineTuning, self).__init__(**kwargs) - self.model_provider = 'Custom' # type: str - self.hyper_parameters = kwargs.get('hyper_parameters', None) + self.model_provider = "Custom" # type: str + self.hyper_parameters = kwargs.get("hyper_parameters", None) class JobInput(msrest.serialization.Model): @@ -9899,31 +9365,33 @@ class JobInput(msrest.serialization.Model): """ _validation = { - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } _subtype_map = { - 'job_input_type': {'custom_model': 'CustomModelJobInput', 'literal': 'LiteralJobInput', - 'mlflow_model': 'MLFlowModelJobInput', 'mltable': 'MLTableJobInput', - 'triton_model': 'TritonModelJobInput', 'uri_file': 'UriFileJobInput', - 'uri_folder': 'UriFolderJobInput'} + "job_input_type": { + "custom_model": "CustomModelJobInput", + "literal": "LiteralJobInput", + "mlflow_model": "MLFlowModelJobInput", + "mltable": "MLTableJobInput", + "triton_model": "TritonModelJobInput", + "uri_file": "UriFileJobInput", + "uri_folder": "UriFolderJobInput", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description for the input. 
:paramtype description: str """ super(JobInput, self).__init__(**kwargs) - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.job_input_type = None # type: Optional[str] @@ -9948,22 +9416,19 @@ class CustomModelJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". 
@@ -9976,11 +9441,11 @@ def __init__( :paramtype description: str """ super(CustomModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'custom_model' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "custom_model" # type: str + self.description = kwargs.get("description", None) class JobOutput(msrest.serialization.Model): @@ -10000,30 +9465,32 @@ class JobOutput(msrest.serialization.Model): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } _subtype_map = { - 'job_output_type': {'custom_model': 'CustomModelJobOutput', 'mlflow_model': 'MLFlowModelJobOutput', - 'mltable': 'MLTableJobOutput', 'triton_model': 'TritonModelJobOutput', - 'uri_file': 'UriFileJobOutput', 'uri_folder': 'UriFolderJobOutput'} + "job_output_type": { + "custom_model": "CustomModelJobOutput", + "mlflow_model": "MLFlowModelJobOutput", + "mltable": "MLTableJobOutput", + "triton_model": "TritonModelJobOutput", + "uri_file": "UriFileJobOutput", + "uri_folder": "UriFolderJobOutput", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description for the output. 
:paramtype description: str """ super(JobOutput, self).__init__(**kwargs) - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.job_output_type = None # type: Optional[str] @@ -10054,24 +9521,21 @@ class CustomModelJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -10090,14 +9554,14 @@ def __init__( :paramtype description: str """ super(CustomModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'custom_model' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "custom_model" # type: str + self.description = kwargs.get("description", None) class MonitoringSignalBase(msrest.serialization.Model): @@ -10121,29 +9585,29 @@ class MonitoringSignalBase(msrest.serialization.Model): """ _validation = { - 'signal_type': {'required': True}, + "signal_type": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, } _subtype_map = { - 'signal_type': {'Custom': 'CustomMonitoringSignal', 'DataDrift': 'DataDriftMonitoringSignal', - 'DataQuality': 'DataQualityMonitoringSignal', - 'FeatureAttributionDrift': 'FeatureAttributionDriftMonitoringSignal', - 'GenerationSafetyQuality': 'GenerationSafetyQualityMonitoringSignal', - 'GenerationTokenStatistics': 'GenerationTokenUsageSignal', - 'ModelPerformance': 
'ModelPerformanceSignal', - 'PredictionDrift': 'PredictionDriftMonitoringSignal'} + "signal_type": { + "Custom": "CustomMonitoringSignal", + "DataDrift": "DataDriftMonitoringSignal", + "DataQuality": "DataQualityMonitoringSignal", + "FeatureAttributionDrift": "FeatureAttributionDriftMonitoringSignal", + "GenerationSafetyQuality": "GenerationSafetyQualityMonitoringSignal", + "GenerationTokenStatistics": "GenerationTokenUsageSignal", + "ModelPerformance": "ModelPerformanceSignal", + "PredictionDrift": "PredictionDriftMonitoringSignal", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. :paramtype notification_types: list[str or @@ -10152,8 +9616,8 @@ def __init__( :paramtype properties: dict[str, str] """ super(MonitoringSignalBase, self).__init__(**kwargs) - self.notification_types = kwargs.get('notification_types', None) - self.properties = kwargs.get('properties', None) + self.notification_types = kwargs.get("notification_types", None) + self.properties = kwargs.get("properties", None) self.signal_type = None # type: Optional[str] @@ -10192,26 +9656,23 @@ class CustomMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'component_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'metric_thresholds': {'required': True}, + "signal_type": {"required": True}, + "component_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "metric_thresholds": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'input_assets': {'key': 'inputAssets', 'type': '{MonitoringInputDataBase}'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'metric_thresholds': 
{'key': 'metricThresholds', 'type': '[CustomMetricThreshold]'}, - 'workspace_connection': {'key': 'workspaceConnection', 'type': 'MonitoringWorkspaceConnection'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "component_id": {"key": "componentId", "type": "str"}, + "input_assets": {"key": "inputAssets", "type": "{MonitoringInputDataBase}"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[CustomMetricThreshold]"}, + "workspace_connection": {"key": "workspaceConnection", "type": "MonitoringWorkspaceConnection"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. :paramtype notification_types: list[str or @@ -10237,12 +9698,12 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection """ super(CustomMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'Custom' # type: str - self.component_id = kwargs['component_id'] - self.input_assets = kwargs.get('input_assets', None) - self.inputs = kwargs.get('inputs', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.workspace_connection = kwargs.get('workspace_connection', None) + self.signal_type = "Custom" # type: str + self.component_id = kwargs["component_id"] + self.input_assets = kwargs.get("input_assets", None) + self.inputs = kwargs.get("inputs", None) + self.metric_thresholds = kwargs["metric_thresholds"] + self.workspace_connection = kwargs.get("workspace_connection", None) class CustomNCrossValidations(NCrossValidations): @@ -10258,26 +9719,23 @@ class CustomNCrossValidations(NCrossValidations): """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 
'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: Required. [Required] N-Cross validations value. :paramtype value: int """ super(CustomNCrossValidations, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] + self.mode = "Custom" # type: str + self.value = kwargs["value"] class CustomSeasonality(Seasonality): @@ -10293,26 +9751,23 @@ class CustomSeasonality(Seasonality): """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: Required. [Required] Seasonality value. 
:paramtype value: int """ super(CustomSeasonality, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] + self.mode = "Custom" # type: str + self.value = kwargs["value"] class CustomService(msrest.serialization.Model): @@ -10339,20 +9794,17 @@ class CustomService(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'Image'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{EnvironmentVariable}'}, - 'docker': {'key': 'docker', 'type': 'Docker'}, - 'endpoints': {'key': 'endpoints', 'type': '[Endpoint]'}, - 'volumes': {'key': 'volumes', 'type': '[VolumeDefinition]'}, - 'kernel': {'key': 'kernel', 'type': 'JupyterKernelConfig'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "image": {"key": "image", "type": "Image"}, + "environment_variables": {"key": "environmentVariables", "type": "{EnvironmentVariable}"}, + "docker": {"key": "docker", "type": "Docker"}, + "endpoints": {"key": "endpoints", "type": "[Endpoint]"}, + "volumes": {"key": "volumes", "type": "[VolumeDefinition]"}, + "kernel": {"key": "kernel", "type": "JupyterKernelConfig"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
@@ -10375,14 +9827,14 @@ def __init__( :paramtype kernel: ~azure.mgmt.machinelearningservices.models.JupyterKernelConfig """ super(CustomService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.image = kwargs.get('image', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.docker = kwargs.get('docker', None) - self.endpoints = kwargs.get('endpoints', None) - self.volumes = kwargs.get('volumes', None) - self.kernel = kwargs.get('kernel', None) + self.additional_properties = kwargs.get("additional_properties", None) + self.name = kwargs.get("name", None) + self.image = kwargs.get("image", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.docker = kwargs.get("docker", None) + self.endpoints = kwargs.get("endpoints", None) + self.volumes = kwargs.get("volumes", None) + self.kernel = kwargs.get("kernel", None) class CustomTargetLags(TargetLags): @@ -10398,26 +9850,23 @@ class CustomTargetLags(TargetLags): """ _validation = { - 'mode': {'required': True}, - 'values': {'required': True}, + "mode": {"required": True}, + "values": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[int]'}, + "mode": {"key": "mode", "type": "str"}, + "values": {"key": "values", "type": "[int]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword values: Required. [Required] Set target lags values. 
:paramtype values: list[int] """ super(CustomTargetLags, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.values = kwargs['values'] + self.mode = "Custom" # type: str + self.values = kwargs["values"] class CustomTargetRollingWindowSize(TargetRollingWindowSize): @@ -10433,26 +9882,23 @@ class CustomTargetRollingWindowSize(TargetRollingWindowSize): """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: Required. [Required] TargetRollingWindowSize value. :paramtype value: int """ super(CustomTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] + self.mode = "Custom" # type: str + self.value = kwargs["value"] class DataImportSource(msrest.serialization.Model): @@ -10471,28 +9917,23 @@ class DataImportSource(msrest.serialization.Model): """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, } - _subtype_map = { - 'source_type': {'database': 'DatabaseSource', 'file_system': 'FileSystemSource'} - } + _subtype_map = {"source_type": {"database": "DatabaseSource", "file_system": "FileSystemSource"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword connection: Workspace connection for data import source storage. 
:paramtype connection: str """ super(DataImportSource, self).__init__(**kwargs) - self.connection = kwargs.get('connection', None) + self.connection = kwargs.get("connection", None) self.source_type = None # type: Optional[str] @@ -10517,22 +9958,19 @@ class DatabaseSource(DataImportSource): """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, - 'stored_procedure': {'key': 'storedProcedure', 'type': 'str'}, - 'stored_procedure_params': {'key': 'storedProcedureParams', 'type': '[{str}]'}, - 'table_name': {'key': 'tableName', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "query": {"key": "query", "type": "str"}, + "stored_procedure": {"key": "storedProcedure", "type": "str"}, + "stored_procedure_params": {"key": "storedProcedureParams", "type": "[{str}]"}, + "table_name": {"key": "tableName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword connection: Workspace connection for data import source storage. 
:paramtype connection: str @@ -10546,11 +9984,11 @@ def __init__( :paramtype table_name: str """ super(DatabaseSource, self).__init__(**kwargs) - self.source_type = 'database' # type: str - self.query = kwargs.get('query', None) - self.stored_procedure = kwargs.get('stored_procedure', None) - self.stored_procedure_params = kwargs.get('stored_procedure_params', None) - self.table_name = kwargs.get('table_name', None) + self.source_type = "database" # type: str + self.query = kwargs.get("query", None) + self.stored_procedure = kwargs.get("stored_procedure", None) + self.stored_procedure_params = kwargs.get("stored_procedure_params", None) + self.table_name = kwargs.get("table_name", None) class DatabricksSchema(msrest.serialization.Model): @@ -10561,19 +9999,16 @@ class DatabricksSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + "properties": {"key": "properties", "type": "DatabricksProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of Databricks. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties """ super(DatabricksSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class Databricks(Compute, DatabricksSchema): @@ -10615,32 +10050,29 @@ class Databricks(Compute, DatabricksSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "DatabricksProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": 
{"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of Databricks. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties @@ -10655,17 +10087,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(Databricks, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'Databricks' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "Databricks" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class DatabricksComputeSecretsProperties(msrest.serialization.Model): @@ -10676,19 +10108,16 @@ class DatabricksComputeSecretsProperties(msrest.serialization.Model): """ _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword databricks_access_token: access token for databricks account. 
:paramtype databricks_access_token: str """ super(DatabricksComputeSecretsProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) + self.databricks_access_token = kwargs.get("databricks_access_token", None) class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties): @@ -10705,25 +10134,22 @@ class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsPropertie """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword databricks_access_token: access token for databricks account. :paramtype databricks_access_token: str """ super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - self.compute_type = 'Databricks' # type: str + self.databricks_access_token = kwargs.get("databricks_access_token", None) + self.compute_type = "Databricks" # type: str class DatabricksProperties(msrest.serialization.Model): @@ -10736,14 +10162,11 @@ class DatabricksProperties(msrest.serialization.Model): """ _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "workspace_url": {"key": "workspaceUrl", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword databricks_access_token: Databricks access token. 
:paramtype databricks_access_token: str @@ -10751,8 +10174,8 @@ def __init__( :paramtype workspace_url: str """ super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - self.workspace_url = kwargs.get('workspace_url', None) + self.databricks_access_token = kwargs.get("databricks_access_token", None) + self.workspace_url = kwargs.get("workspace_url", None) class DataCollector(msrest.serialization.Model): @@ -10779,19 +10202,16 @@ class DataCollector(msrest.serialization.Model): """ _validation = { - 'collections': {'required': True}, + "collections": {"required": True}, } _attribute_map = { - 'collections': {'key': 'collections', 'type': '{Collection}'}, - 'request_logging': {'key': 'requestLogging', 'type': 'RequestLogging'}, - 'rolling_rate': {'key': 'rollingRate', 'type': 'str'}, + "collections": {"key": "collections", "type": "{Collection}"}, + "request_logging": {"key": "requestLogging", "type": "RequestLogging"}, + "rolling_rate": {"key": "rollingRate", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword collections: Required. [Required] The collection configuration. Each collection has it own configuration to collect model data and the name of collection can be arbitrary string. 
@@ -10811,9 +10231,9 @@ def __init__( :paramtype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType """ super(DataCollector, self).__init__(**kwargs) - self.collections = kwargs['collections'] - self.request_logging = kwargs.get('request_logging', None) - self.rolling_rate = kwargs.get('rolling_rate', None) + self.collections = kwargs["collections"] + self.request_logging = kwargs.get("request_logging", None) + self.rolling_rate = kwargs.get("rolling_rate", None) class DataContainer(ProxyResource): @@ -10839,31 +10259,28 @@ class DataContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties """ super(DataContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class DataContainerProperties(AssetContainer): @@ -10891,25 +10308,22 @@ class DataContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'data_type': {'required': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "data_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -10924,7 +10338,7 @@ def __init__( :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType """ super(DataContainerProperties, self).__init__(**kwargs) - self.data_type = kwargs['data_type'] + self.data_type = kwargs["data_type"] class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): @@ -10938,14 +10352,11 @@ class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of DataContainer objects. If null, there are no additional pages. @@ -10954,8 +10365,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] """ super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class DataDriftMonitoringSignal(MonitoringSignalBase): @@ -10995,29 +10406,26 @@ class DataDriftMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 
'MonitoringDataSegment'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'feature_importance_settings': {'key': 'featureImportanceSettings', 'type': 'FeatureImportanceSettings'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataDriftMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "feature_importance_settings": {"key": "featureImportanceSettings", "type": "FeatureImportanceSettings"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataDriftMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. 
:paramtype notification_types: list[str or @@ -11045,14 +10453,14 @@ def __init__( :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ super(DataDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'DataDrift' # type: str - self.data_segment = kwargs.get('data_segment', None) - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.feature_importance_settings = kwargs.get('feature_importance_settings', None) - self.features = kwargs.get('features', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] + self.signal_type = "DataDrift" # type: str + self.data_segment = kwargs.get("data_segment", None) + self.feature_data_type_override = kwargs.get("feature_data_type_override", None) + self.feature_importance_settings = kwargs.get("feature_importance_settings", None) + self.features = kwargs.get("features", None) + self.metric_thresholds = kwargs["metric_thresholds"] + self.production_data = kwargs["production_data"] + self.reference_data = kwargs["reference_data"] class DataFactory(Compute): @@ -11092,31 +10500,28 @@ class DataFactory(Compute): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': 
{'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword compute_location: Location for the underlying compute. 
:paramtype compute_location: str @@ -11129,7 +10534,7 @@ def __init__( :paramtype disable_local_auth: bool """ super(DataFactory, self).__init__(**kwargs) - self.compute_type = 'DataFactory' # type: str + self.compute_type = "DataFactory" # type: str class DataVersionBaseProperties(AssetBase): @@ -11168,31 +10573,28 @@ class DataVersionBaseProperties(AssetBase): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } _subtype_map = { - 'data_type': {'mltable': 'MLTableData', 'uri_file': 'UriFileDataVersion', 'uri_folder': 'UriFolderDataVersion'} + "data_type": {"mltable": "MLTableData", 
"uri_file": "UriFileDataVersion", "uri_folder": "UriFolderDataVersion"} } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -11219,10 +10621,10 @@ def __init__( :paramtype stage: str """ super(DataVersionBaseProperties, self).__init__(**kwargs) - self.data_type = 'DataVersionBaseProperties' # type: str - self.data_uri = kwargs['data_uri'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.stage = kwargs.get('stage', None) + self.data_type = "DataVersionBaseProperties" # type: str + self.data_uri = kwargs["data_uri"] + self.intellectual_property = kwargs.get("intellectual_property", None) + self.stage = kwargs.get("stage", None) class DataImport(DataVersionBaseProperties): @@ -11262,29 +10664,26 @@ class DataImport(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'DataImportSource'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", 
"type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "source": {"key": "source", "type": "DataImportSource"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -11315,9 +10714,9 @@ def __init__( :paramtype source: ~azure.mgmt.machinelearningservices.models.DataImportSource """ super(DataImport, self).__init__(**kwargs) - self.data_type = 'uri_folder' # type: str - self.asset_name = kwargs.get('asset_name', None) - self.source = kwargs.get('source', None) + self.data_type = "uri_folder" # type: str + self.asset_name = kwargs.get("asset_name", None) + self.source = kwargs.get("source", None) class DataLakeAnalyticsSchema(msrest.serialization.Model): @@ -11329,20 +10728,17 @@ class DataLakeAnalyticsSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties """ super(DataLakeAnalyticsSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): @@ -11385,32 +10781,29 @@ class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): 
""" _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def 
__init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: :paramtype properties: @@ -11426,17 +10819,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(DataLakeAnalytics, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'DataLakeAnalytics' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "DataLakeAnalytics" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): @@ -11447,19 +10840,16 @@ class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): """ _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + "data_lake_store_account_name": {"key": "dataLakeStoreAccountName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword data_lake_store_account_name: DataLake Store Account Name. 
:paramtype data_lake_store_account_name: str """ super(DataLakeAnalyticsSchemaProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) + self.data_lake_store_account_name = kwargs.get("data_lake_store_account_name", None) class DataPathAssetReference(AssetReferenceBase): @@ -11477,19 +10867,16 @@ class DataPathAssetReference(AssetReferenceBase): """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "datastore_id": {"key": "datastoreId", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword datastore_id: ARM resource ID of the datastore where the asset is located. 
:paramtype datastore_id: str @@ -11497,9 +10884,9 @@ def __init__( :paramtype path: str """ super(DataPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'DataPath' # type: str - self.datastore_id = kwargs.get('datastore_id', None) - self.path = kwargs.get('path', None) + self.reference_type = "DataPath" # type: str + self.datastore_id = kwargs.get("datastore_id", None) + self.path = kwargs.get("path", None) class DataQualityMonitoringSignal(MonitoringSignalBase): @@ -11538,28 +10925,25 @@ class DataQualityMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'feature_importance_settings': {'key': 'featureImportanceSettings', 'type': 'FeatureImportanceSettings'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataQualityMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "feature_importance_settings": {"key": "featureImportanceSettings", "type": 
"FeatureImportanceSettings"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataQualityMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. :paramtype notification_types: list[str or @@ -11586,13 +10970,13 @@ def __init__( :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ super(DataQualityMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'DataQuality' # type: str - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.feature_importance_settings = kwargs.get('feature_importance_settings', None) - self.features = kwargs.get('features', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] + self.signal_type = "DataQuality" # type: str + self.feature_data_type_override = kwargs.get("feature_data_type_override", None) + self.feature_importance_settings = kwargs.get("feature_importance_settings", None) + self.features = kwargs.get("features", None) + self.metric_thresholds = kwargs["metric_thresholds"] + self.production_data = kwargs["production_data"] + self.reference_data = kwargs["reference_data"] class DatasetExportSummary(ExportSummary): @@ -11618,31 +11002,27 @@ class DatasetExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'labeled_asset_name': {'readonly': True}, + "end_date_time": {"readonly": 
True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "labeled_asset_name": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "long"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "labeled_asset_name": {"key": "labeledAssetName", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(DatasetExportSummary, self).__init__(**kwargs) - self.format = 'Dataset' # type: str + self.format = "Dataset" # type: str self.labeled_asset_name = None @@ -11669,31 +11049,28 @@ class Datastore(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + 
"type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DatastoreProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties """ super(Datastore, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): @@ -11707,14 +11084,11 @@ class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Datastore]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Datastore]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of Datastore objects. If null, there are no additional pages. 
@@ -11723,8 +11097,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.Datastore] """ super(DatastoreResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class DataVersionBase(ProxyResource): @@ -11750,31 +11124,28 @@ class DataVersionBase(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataVersionBaseProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataVersionBaseProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties """ super(DataVersionBase, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): @@ -11788,14 +11159,11 @@ class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataVersionBase]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataVersionBase]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of DataVersionBase objects. If null, there are no additional pages. @@ -11804,8 +11172,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] """ super(DataVersionBaseResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class OnlineScaleSettings(msrest.serialization.Model): @@ -11822,23 +11190,19 @@ class OnlineScaleSettings(msrest.serialization.Model): """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "scale_type": {"key": "scaleType", "type": "str"}, } _subtype_map = { - 'scale_type': {'Default': 'DefaultScaleSettings', 'TargetUtilization': 'TargetUtilizationScaleSettings'} + "scale_type": {"Default": "DefaultScaleSettings", "TargetUtilization": "TargetUtilizationScaleSettings"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(OnlineScaleSettings, self).__init__(**kwargs) self.scale_type = None # type: Optional[str] 
@@ -11854,21 +11218,17 @@ class DefaultScaleSettings(OnlineScaleSettings): """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "scale_type": {"key": "scaleType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(DefaultScaleSettings, self).__init__(**kwargs) - self.scale_type = 'Default' # type: str + self.scale_type = "Default" # type: str class DeploymentLogs(msrest.serialization.Model): @@ -11879,19 +11239,16 @@ class DeploymentLogs(msrest.serialization.Model): """ _attribute_map = { - 'content': {'key': 'content', 'type': 'str'}, + "content": {"key": "content", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword content: The retrieved online deployment logs. :paramtype content: str """ super(DeploymentLogs, self).__init__(**kwargs) - self.content = kwargs.get('content', None) + self.content = kwargs.get("content", None) class DeploymentLogsRequest(msrest.serialization.Model): @@ -11905,14 +11262,11 @@ class DeploymentLogsRequest(msrest.serialization.Model): """ _attribute_map = { - 'container_type': {'key': 'containerType', 'type': 'str'}, - 'tail': {'key': 'tail', 'type': 'int'}, + "container_type": {"key": "containerType", "type": "str"}, + "tail": {"key": "tail", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword container_type: The type of container to retrieve logs from. Possible values include: "StorageInitializer", "InferenceServer", "ModelDataCollector". 
@@ -11921,8 +11275,8 @@ def __init__( :paramtype tail: int """ super(DeploymentLogsRequest, self).__init__(**kwargs) - self.container_type = kwargs.get('container_type', None) - self.tail = kwargs.get('tail', None) + self.container_type = kwargs.get("container_type", None) + self.tail = kwargs.get("tail", None) class ResourceConfiguration(msrest.serialization.Model): @@ -11943,17 +11297,14 @@ class ResourceConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword instance_count: Optional number of instances or nodes used by the compute target. 
:paramtype instance_count: int @@ -11969,11 +11320,11 @@ def __init__( :paramtype properties: dict[str, any] """ super(ResourceConfiguration, self).__init__(**kwargs) - self.instance_count = kwargs.get('instance_count', 1) - self.instance_type = kwargs.get('instance_type', None) - self.locations = kwargs.get('locations', None) - self.max_instance_count = kwargs.get('max_instance_count', None) - self.properties = kwargs.get('properties', None) + self.instance_count = kwargs.get("instance_count", 1) + self.instance_type = kwargs.get("instance_type", None) + self.locations = kwargs.get("locations", None) + self.max_instance_count = kwargs.get("max_instance_count", None) + self.properties = kwargs.get("properties", None) class DeploymentResourceConfiguration(ResourceConfiguration): @@ -11994,17 +11345,14 @@ class DeploymentResourceConfiguration(ResourceConfiguration): """ _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword instance_count: Optional number of instances or nodes used by the compute target. 
:paramtype instance_count: int @@ -12034,15 +11382,12 @@ class DestinationAsset(msrest.serialization.Model): """ _attribute_map = { - 'destination_name': {'key': 'destinationName', 'type': 'str'}, - 'destination_version': {'key': 'destinationVersion', 'type': 'str'}, - 'registry_name': {'key': 'registryName', 'type': 'str'}, + "destination_name": {"key": "destinationName", "type": "str"}, + "destination_version": {"key": "destinationVersion", "type": "str"}, + "registry_name": {"key": "registryName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword destination_name: Destination asset name. :paramtype destination_name: str @@ -12052,9 +11397,9 @@ def __init__( :paramtype registry_name: str """ super(DestinationAsset, self).__init__(**kwargs) - self.destination_name = kwargs.get('destination_name', None) - self.destination_version = kwargs.get('destination_version', None) - self.registry_name = kwargs.get('registry_name', None) + self.destination_name = kwargs.get("destination_name", None) + self.destination_version = kwargs.get("destination_version", None) + self.registry_name = kwargs.get("registry_name", None) class DiagnoseRequestProperties(msrest.serialization.Model): @@ -12084,22 +11429,19 @@ class DiagnoseRequestProperties(msrest.serialization.Model): """ _attribute_map = { - 'application_insights': {'key': 'applicationInsights', 'type': '{object}'}, - 'container_registry': {'key': 'containerRegistry', 'type': '{object}'}, - 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'}, - 'key_vault': {'key': 'keyVault', 'type': '{object}'}, - 'nsg': {'key': 'nsg', 'type': '{object}'}, - 'others': {'key': 'others', 'type': '{object}'}, - 'required_resource_providers': {'key': 'requiredResourceProviders', 'type': '{object}'}, - 'resource_lock': {'key': 'resourceLock', 'type': '{object}'}, - 'storage_account': {'key': 'storageAccount', 'type': '{object}'}, - 'udr': {'key': 'udr', 'type': '{object}'}, + 
"application_insights": {"key": "applicationInsights", "type": "{object}"}, + "container_registry": {"key": "containerRegistry", "type": "{object}"}, + "dns_resolution": {"key": "dnsResolution", "type": "{object}"}, + "key_vault": {"key": "keyVault", "type": "{object}"}, + "nsg": {"key": "nsg", "type": "{object}"}, + "others": {"key": "others", "type": "{object}"}, + "required_resource_providers": {"key": "requiredResourceProviders", "type": "{object}"}, + "resource_lock": {"key": "resourceLock", "type": "{object}"}, + "storage_account": {"key": "storageAccount", "type": "{object}"}, + "udr": {"key": "udr", "type": "{object}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword application_insights: Setting for diagnosing dependent application insights. :paramtype application_insights: dict[str, any] @@ -12124,16 +11466,16 @@ def __init__( :paramtype udr: dict[str, any] """ super(DiagnoseRequestProperties, self).__init__(**kwargs) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.dns_resolution = kwargs.get('dns_resolution', None) - self.key_vault = kwargs.get('key_vault', None) - self.nsg = kwargs.get('nsg', None) - self.others = kwargs.get('others', None) - self.required_resource_providers = kwargs.get('required_resource_providers', None) - self.resource_lock = kwargs.get('resource_lock', None) - self.storage_account = kwargs.get('storage_account', None) - self.udr = kwargs.get('udr', None) + self.application_insights = kwargs.get("application_insights", None) + self.container_registry = kwargs.get("container_registry", None) + self.dns_resolution = kwargs.get("dns_resolution", None) + self.key_vault = kwargs.get("key_vault", None) + self.nsg = kwargs.get("nsg", None) + self.others = kwargs.get("others", None) + self.required_resource_providers = kwargs.get("required_resource_providers", None) + self.resource_lock = 
kwargs.get("resource_lock", None) + self.storage_account = kwargs.get("storage_account", None) + self.udr = kwargs.get("udr", None) class DiagnoseResponseResult(msrest.serialization.Model): @@ -12144,19 +11486,16 @@ class DiagnoseResponseResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'}, + "value": {"key": "value", "type": "DiagnoseResponseResultValue"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue """ super(DiagnoseResponseResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class DiagnoseResponseResultValue(msrest.serialization.Model): @@ -12189,21 +11528,18 @@ class DiagnoseResponseResultValue(msrest.serialization.Model): """ _attribute_map = { - 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'}, - 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'}, - 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'}, - 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'}, - 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'}, - 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'}, - 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'}, - 'application_insights_results': {'key': 'applicationInsightsResults', 'type': '[DiagnoseResult]'}, - 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'}, + "user_defined_route_results": {"key": "userDefinedRouteResults", "type": "[DiagnoseResult]"}, + "network_security_rule_results": {"key": "networkSecurityRuleResults", "type": "[DiagnoseResult]"}, + "resource_lock_results": {"key": 
"resourceLockResults", "type": "[DiagnoseResult]"}, + "dns_resolution_results": {"key": "dnsResolutionResults", "type": "[DiagnoseResult]"}, + "storage_account_results": {"key": "storageAccountResults", "type": "[DiagnoseResult]"}, + "key_vault_results": {"key": "keyVaultResults", "type": "[DiagnoseResult]"}, + "container_registry_results": {"key": "containerRegistryResults", "type": "[DiagnoseResult]"}, + "application_insights_results": {"key": "applicationInsightsResults", "type": "[DiagnoseResult]"}, + "other_results": {"key": "otherResults", "type": "[DiagnoseResult]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword user_defined_route_results: :paramtype user_defined_route_results: @@ -12232,15 +11568,15 @@ def __init__( :paramtype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] """ super(DiagnoseResponseResultValue, self).__init__(**kwargs) - self.user_defined_route_results = kwargs.get('user_defined_route_results', None) - self.network_security_rule_results = kwargs.get('network_security_rule_results', None) - self.resource_lock_results = kwargs.get('resource_lock_results', None) - self.dns_resolution_results = kwargs.get('dns_resolution_results', None) - self.storage_account_results = kwargs.get('storage_account_results', None) - self.key_vault_results = kwargs.get('key_vault_results', None) - self.container_registry_results = kwargs.get('container_registry_results', None) - self.application_insights_results = kwargs.get('application_insights_results', None) - self.other_results = kwargs.get('other_results', None) + self.user_defined_route_results = kwargs.get("user_defined_route_results", None) + self.network_security_rule_results = kwargs.get("network_security_rule_results", None) + self.resource_lock_results = kwargs.get("resource_lock_results", None) + self.dns_resolution_results = kwargs.get("dns_resolution_results", None) + self.storage_account_results = 
kwargs.get("storage_account_results", None) + self.key_vault_results = kwargs.get("key_vault_results", None) + self.container_registry_results = kwargs.get("container_registry_results", None) + self.application_insights_results = kwargs.get("application_insights_results", None) + self.other_results = kwargs.get("other_results", None) class DiagnoseResult(msrest.serialization.Model): @@ -12258,23 +11594,19 @@ class DiagnoseResult(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, + "code": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(DiagnoseResult, self).__init__(**kwargs) self.code = None self.level = None @@ -12289,19 +11621,16 @@ class DiagnoseWorkspaceParameters(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'}, + "value": {"key": "value", "type": "DiagnoseRequestProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties """ super(DiagnoseWorkspaceParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class DistributionConfiguration(msrest.serialization.Model): @@ -12319,23 +11648,17 @@ class DistributionConfiguration(msrest.serialization.Model): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 
'distribution_type': {'key': 'distributionType', 'type': 'str'}, + "distribution_type": {"key": "distributionType", "type": "str"}, } - _subtype_map = { - 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'Ray': 'Ray', 'TensorFlow': 'TensorFlow'} - } + _subtype_map = {"distribution_type": {"Mpi": "Mpi", "PyTorch": "PyTorch", "Ray": "Ray", "TensorFlow": "TensorFlow"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(DistributionConfiguration, self).__init__(**kwargs) self.distribution_type = None # type: Optional[str] @@ -12351,14 +11674,11 @@ class Docker(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'privileged': {'key': 'privileged', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "privileged": {"key": "privileged", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
@@ -12367,8 +11687,8 @@ def __init__( :paramtype privileged: bool """ super(Docker, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.privileged = kwargs.get('privileged', None) + self.additional_properties = kwargs.get("additional_properties", None) + self.privileged = kwargs.get("privileged", None) class DockerCredential(DataReferenceCredential): @@ -12388,19 +11708,16 @@ class DockerCredential(DataReferenceCredential): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'user_name': {'key': 'userName', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "user_name": {"key": "userName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword password: DockerCredential user password. 
:paramtype password: str @@ -12408,9 +11725,9 @@ def __init__( :paramtype user_name: str """ super(DockerCredential, self).__init__(**kwargs) - self.credential_type = 'DockerCredentials' # type: str - self.password = kwargs.get('password', None) - self.user_name = kwargs.get('user_name', None) + self.credential_type = "DockerCredentials" # type: str + self.password = kwargs.get("password", None) + self.user_name = kwargs.get("user_name", None) class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): @@ -12423,23 +11740,20 @@ class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): """ _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key_identifier: Required. 
:paramtype key_identifier: str """ super(EncryptionKeyVaultUpdateProperties, self).__init__(**kwargs) - self.key_identifier = kwargs['key_identifier'] + self.key_identifier = kwargs["key_identifier"] class EncryptionProperty(msrest.serialization.Model): @@ -12469,23 +11783,20 @@ class EncryptionProperty(msrest.serialization.Model): """ _validation = { - 'key_vault_properties': {'required': True}, - 'status': {'required': True}, + "key_vault_properties": {"required": True}, + "status": {"required": True}, } _attribute_map = { - 'cosmos_db_resource_id': {'key': 'cosmosDbResourceId', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityForCmk'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, - 'search_account_resource_id': {'key': 'searchAccountResourceId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'storage_account_resource_id': {'key': 'storageAccountResourceId', 'type': 'str'}, + "cosmos_db_resource_id": {"key": "cosmosDbResourceId", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityForCmk"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "KeyVaultProperties"}, + "search_account_resource_id": {"key": "searchAccountResourceId", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "storage_account_resource_id": {"key": "storageAccountResourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cosmos_db_resource_id: The byok cosmosdb account that customer brings to store customer's data @@ -12508,12 +11819,12 @@ def __init__( :paramtype storage_account_resource_id: str """ super(EncryptionProperty, self).__init__(**kwargs) - self.cosmos_db_resource_id = kwargs.get('cosmos_db_resource_id', None) - self.identity = kwargs.get('identity', None) - self.key_vault_properties = kwargs['key_vault_properties'] - self.search_account_resource_id = kwargs.get('search_account_resource_id', None) - self.status 
= kwargs['status'] - self.storage_account_resource_id = kwargs.get('storage_account_resource_id', None) + self.cosmos_db_resource_id = kwargs.get("cosmos_db_resource_id", None) + self.identity = kwargs.get("identity", None) + self.key_vault_properties = kwargs["key_vault_properties"] + self.search_account_resource_id = kwargs.get("search_account_resource_id", None) + self.status = kwargs["status"] + self.storage_account_resource_id = kwargs.get("storage_account_resource_id", None) class EncryptionUpdateProperties(msrest.serialization.Model): @@ -12527,24 +11838,21 @@ class EncryptionUpdateProperties(msrest.serialization.Model): """ _validation = { - 'key_vault_properties': {'required': True}, + "key_vault_properties": {"required": True}, } _attribute_map = { - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionKeyVaultUpdateProperties'}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultUpdateProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key_vault_properties: Required. 
:paramtype key_vault_properties: ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties """ super(EncryptionUpdateProperties, self).__init__(**kwargs) - self.key_vault_properties = kwargs['key_vault_properties'] + self.key_vault_properties = kwargs["key_vault_properties"] class Endpoint(msrest.serialization.Model): @@ -12564,17 +11872,14 @@ class Endpoint(msrest.serialization.Model): """ _attribute_map = { - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'int'}, - 'published': {'key': 'published', 'type': 'int'}, - 'host_ip': {'key': 'hostIp', 'type': 'str'}, + "protocol": {"key": "protocol", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "target": {"key": "target", "type": "int"}, + "published": {"key": "published", "type": "int"}, + "host_ip": {"key": "hostIp", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword protocol: Protocol over which communication will happen over this endpoint. Possible values include: "tcp", "udp", "http". Default value: "tcp". 
@@ -12589,11 +11894,11 @@ def __init__( :paramtype host_ip: str """ super(Endpoint, self).__init__(**kwargs) - self.protocol = kwargs.get('protocol', "tcp") - self.name = kwargs.get('name', None) - self.target = kwargs.get('target', None) - self.published = kwargs.get('published', None) - self.host_ip = kwargs.get('host_ip', None) + self.protocol = kwargs.get("protocol", "tcp") + self.name = kwargs.get("name", None) + self.target = kwargs.get("target", None) + self.published = kwargs.get("published", None) + self.host_ip = kwargs.get("host_ip", None) class EndpointAuthKeys(msrest.serialization.Model): @@ -12606,14 +11911,11 @@ class EndpointAuthKeys(msrest.serialization.Model): """ _attribute_map = { - 'primary_key': {'key': 'primaryKey', 'type': 'str'}, - 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + "primary_key": {"key": "primaryKey", "type": "str"}, + "secondary_key": {"key": "secondaryKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword primary_key: The primary key. 
:paramtype primary_key: str @@ -12621,8 +11923,8 @@ def __init__( :paramtype secondary_key: str """ super(EndpointAuthKeys, self).__init__(**kwargs) - self.primary_key = kwargs.get('primary_key', None) - self.secondary_key = kwargs.get('secondary_key', None) + self.primary_key = kwargs.get("primary_key", None) + self.secondary_key = kwargs.get("secondary_key", None) class EndpointAuthToken(msrest.serialization.Model): @@ -12639,16 +11941,13 @@ class EndpointAuthToken(msrest.serialization.Model): """ _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'}, - 'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, + "access_token": {"key": "accessToken", "type": "str"}, + "expiry_time_utc": {"key": "expiryTimeUtc", "type": "long"}, + "refresh_after_time_utc": {"key": "refreshAfterTimeUtc", "type": "long"}, + "token_type": {"key": "tokenType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword access_token: Access token for endpoint authentication. 
:paramtype access_token: str @@ -12660,10 +11959,10 @@ def __init__( :paramtype token_type: str """ super(EndpointAuthToken, self).__init__(**kwargs) - self.access_token = kwargs.get('access_token', None) - self.expiry_time_utc = kwargs.get('expiry_time_utc', 0) - self.refresh_after_time_utc = kwargs.get('refresh_after_time_utc', 0) - self.token_type = kwargs.get('token_type', None) + self.access_token = kwargs.get("access_token", None) + self.expiry_time_utc = kwargs.get("expiry_time_utc", 0) + self.refresh_after_time_utc = kwargs.get("refresh_after_time_utc", 0) + self.token_type = kwargs.get("token_type", None) class EndpointDeploymentModel(msrest.serialization.Model): @@ -12680,16 +11979,13 @@ class EndpointDeploymentModel(msrest.serialization.Model): """ _attribute_map = { - 'format': {'key': 'format', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "format": {"key": "format", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "source": {"key": "source", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword format: Model format. 
:paramtype format: str @@ -12701,10 +11997,10 @@ def __init__( :paramtype version: str """ super(EndpointDeploymentModel, self).__init__(**kwargs) - self.format = kwargs.get('format', None) - self.name = kwargs.get('name', None) - self.source = kwargs.get('source', None) - self.version = kwargs.get('version', None) + self.format = kwargs.get("format", None) + self.name = kwargs.get("name", None) + self.source = kwargs.get("source", None) + self.version = kwargs.get("version", None) class EndpointDeploymentResourcePropertiesBasicResource(Resource): @@ -12731,32 +12027,29 @@ class EndpointDeploymentResourcePropertiesBasicResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EndpointDeploymentResourceProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EndpointDeploymentResourceProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.EndpointDeploymentResourceProperties """ super(EndpointDeploymentResourcePropertiesBasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class EndpointDeploymentResourcePropertiesBasicResourceArmPaginatedResult(msrest.serialization.Model): @@ -12770,14 +12063,11 @@ class EndpointDeploymentResourcePropertiesBasicResourceArmPaginatedResult(msrest """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EndpointDeploymentResourcePropertiesBasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EndpointDeploymentResourcePropertiesBasicResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: :paramtype next_link: str @@ -12786,8 +12076,8 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.EndpointDeploymentResourcePropertiesBasicResource] """ super(EndpointDeploymentResourcePropertiesBasicResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class EndpointKeys(msrest.serialization.Model): @@ -12798,19 +12088,16 @@ class EndpointKeys(msrest.serialization.Model): """ _attribute_map = { - 'keys': {'key': 'keys', 'type': 'AccountApiKeys'}, + "keys": {"key": "keys", "type": "AccountApiKeys"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword keys: Dictionary of Keys for the endpoint. 
:paramtype keys: ~azure.mgmt.machinelearningservices.models.AccountApiKeys """ super(EndpointKeys, self).__init__(**kwargs) - self.keys = kwargs.get('keys', None) + self.keys = kwargs.get("keys", None) class EndpointModels(msrest.serialization.Model): @@ -12824,14 +12111,11 @@ class EndpointModels(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[AccountModel]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[AccountModel]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. @@ -12840,8 +12124,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.AccountModel] """ super(EndpointModels, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class EndpointResourcePropertiesBasicResource(Resource): @@ -12867,31 +12151,28 @@ class EndpointResourcePropertiesBasicResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EndpointResourceProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": 
"str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EndpointResourceProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.EndpointResourceProperties """ super(EndpointResourcePropertiesBasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class EndpointResourcePropertiesBasicResourceArmPaginatedResult(msrest.serialization.Model): @@ -12905,14 +12186,11 @@ class EndpointResourcePropertiesBasicResourceArmPaginatedResult(msrest.serializa """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EndpointResourcePropertiesBasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EndpointResourcePropertiesBasicResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: :paramtype next_link: str @@ -12921,8 +12199,8 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.EndpointResourcePropertiesBasicResource] """ super(EndpointResourcePropertiesBasicResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class EndpointScheduleAction(ScheduleActionBase): @@ -12945,19 +12223,16 @@ class EndpointScheduleAction(ScheduleActionBase): """ _validation = { - 'action_type': {'required': True}, - 'endpoint_invocation_definition': {'required': True}, + "action_type": {"required": True}, + "endpoint_invocation_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'endpoint_invocation_definition': {'key': 
'endpointInvocationDefinition', 'type': 'object'}, + "action_type": {"key": "actionType", "type": "str"}, + "endpoint_invocation_definition": {"key": "endpointInvocationDefinition", "type": "object"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword endpoint_invocation_definition: Required. [Required] Defines Schedule action definition details. @@ -12969,8 +12244,8 @@ def __init__( :paramtype endpoint_invocation_definition: any """ super(EndpointScheduleAction, self).__init__(**kwargs) - self.action_type = 'InvokeBatchEndpoint' # type: str - self.endpoint_invocation_definition = kwargs['endpoint_invocation_definition'] + self.action_type = "InvokeBatchEndpoint" # type: str + self.endpoint_invocation_definition = kwargs["endpoint_invocation_definition"] class EnvironmentContainer(ProxyResource): @@ -12996,32 +12271,29 @@ class EnvironmentContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties """ super(EnvironmentContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class EnvironmentContainerProperties(AssetContainer): @@ -13048,25 +12320,22 @@ class EnvironmentContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -13092,14 +12361,11 @@ class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model) """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of EnvironmentContainer objects. If null, there are no additional pages. @@ -13108,8 +12374,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] """ super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class EnvironmentVariable(msrest.serialization.Model): @@ -13126,15 +12392,12 @@ class EnvironmentVariable(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
@@ -13146,9 +12409,9 @@ def __init__( :paramtype value: str """ super(EnvironmentVariable, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', "local") - self.value = kwargs.get('value', None) + self.additional_properties = kwargs.get("additional_properties", None) + self.type = kwargs.get("type", "local") + self.value = kwargs.get("value", None) class EnvironmentVersion(ProxyResource): @@ -13174,31 +12437,28 @@ class EnvironmentVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties """ super(EnvironmentVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class EnvironmentVersionProperties(AssetBase): @@ -13271,33 +12531,30 @@ class EnvironmentVersionProperties(AssetBase): """ _validation = { - 'environment_type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "environment_type": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'auto_rebuild': {'key': 'autoRebuild', 'type': 'str'}, - 'build': {'key': 'build', 'type': 'BuildContext'}, - 'conda_file': {'key': 'condaFile', 'type': 'str'}, - 'environment_type': {'key': 'environmentType', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'str'}, - 'inference_config': {'key': 'inferenceConfig', 'type': 'InferenceContainerProperties'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "auto_rebuild": {"key": "autoRebuild", "type": "str"}, + "build": {"key": "build", "type": "BuildContext"}, + 
"conda_file": {"key": "condaFile", "type": "str"}, + "environment_type": {"key": "environmentType", "type": "str"}, + "image": {"key": "image", "type": "str"}, + "inference_config": {"key": "inferenceConfig", "type": "InferenceContainerProperties"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "os_type": {"key": "osType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -13350,16 +12607,16 @@ def __init__( :paramtype stage: str """ super(EnvironmentVersionProperties, self).__init__(**kwargs) - self.auto_rebuild = kwargs.get('auto_rebuild', None) - self.build = kwargs.get('build', None) - self.conda_file = kwargs.get('conda_file', None) + self.auto_rebuild = kwargs.get("auto_rebuild", None) + self.build = kwargs.get("build", None) + self.conda_file = kwargs.get("conda_file", None) self.environment_type = None - self.image = kwargs.get('image', None) - self.inference_config = kwargs.get('inference_config', None) - self.intellectual_property = kwargs.get('intellectual_property', None) - self.os_type = kwargs.get('os_type', None) + self.image = kwargs.get("image", None) + self.inference_config = kwargs.get("inference_config", None) + self.intellectual_property = kwargs.get("intellectual_property", None) + self.os_type = kwargs.get("os_type", None) self.provisioning_state = None - self.stage = kwargs.get('stage', None) + self.stage = kwargs.get("stage", None) class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): @@ -13373,14 +12630,11 @@ class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentVersion]'}, + 
"next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of EnvironmentVersion objects. If null, there are no additional pages. @@ -13389,8 +12643,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] """ super(EnvironmentVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ErrorAdditionalInfo(msrest.serialization.Model): @@ -13405,21 +12659,17 @@ class ErrorAdditionalInfo(msrest.serialization.Model): """ _validation = { - 'type': {'readonly': True}, - 'info': {'readonly': True}, + "type": {"readonly": True}, + "info": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'info': {'key': 'info', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ErrorAdditionalInfo, self).__init__(**kwargs) self.type = None self.info = None @@ -13443,27 +12693,23 @@ class ErrorDetail(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'target': {'readonly': True}, - 'details': {'readonly': True}, - 'additional_info': {'readonly': True}, + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 
'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ErrorDetail, self).__init__(**kwargs) self.code = None self.message = None @@ -13480,19 +12726,16 @@ class ErrorResponse(msrest.serialization.Model): """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorDetail'}, + "error": {"key": "error", "type": "ErrorDetail"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword error: The error object. :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail """ super(ErrorResponse, self).__init__(**kwargs) - self.error = kwargs.get('error', None) + self.error = kwargs.get("error", None) class EstimatedVMPrice(msrest.serialization.Model): @@ -13511,21 +12754,18 @@ class EstimatedVMPrice(msrest.serialization.Model): """ _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, + "retail_price": {"required": True}, + "os_type": {"required": True}, + "vm_tier": {"required": True}, } _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, + "retail_price": {"key": "retailPrice", "type": "float"}, + "os_type": {"key": "osType", "type": "str"}, + "vm_tier": {"key": "vmTier", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword retail_price: Required. The price charged for using the VM. 
:paramtype retail_price: float @@ -13537,9 +12777,9 @@ def __init__( :paramtype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier """ super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = kwargs['retail_price'] - self.os_type = kwargs['os_type'] - self.vm_tier = kwargs['vm_tier'] + self.retail_price = kwargs["retail_price"] + self.os_type = kwargs["os_type"] + self.vm_tier = kwargs["vm_tier"] class EstimatedVMPrices(msrest.serialization.Model): @@ -13559,21 +12799,18 @@ class EstimatedVMPrices(msrest.serialization.Model): """ _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, + "billing_currency": {"required": True}, + "unit_of_measure": {"required": True}, + "values": {"required": True}, } _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + "billing_currency": {"key": "billingCurrency", "type": "str"}, + "unit_of_measure": {"key": "unitOfMeasure", "type": "str"}, + "values": {"key": "values", "type": "[EstimatedVMPrice]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword billing_currency: Required. Three lettered code specifying the currency of the VM price. Example: USD. Possible values include: "USD". 
@@ -13586,9 +12823,9 @@ def __init__( :paramtype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] """ super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = kwargs['billing_currency'] - self.unit_of_measure = kwargs['unit_of_measure'] - self.values = kwargs['values'] + self.billing_currency = kwargs["billing_currency"] + self.unit_of_measure = kwargs["unit_of_measure"] + self.values = kwargs["values"] class ExternalFQDNResponse(msrest.serialization.Model): @@ -13599,19 +12836,16 @@ class ExternalFQDNResponse(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[FQDNEndpointsPropertyBag]'}, + "value": {"key": "value", "type": "[FQDNEndpointsPropertyBag]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] """ super(ExternalFQDNResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class Feature(ProxyResource): @@ -13637,31 +12871,28 @@ class Feature(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeatureProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", 
"type": "FeatureProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties """ super(Feature, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class FeatureAttributionDriftMonitoringSignal(MonitoringSignalBase): @@ -13699,28 +12930,25 @@ class FeatureAttributionDriftMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'feature_importance_settings': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "feature_importance_settings": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'feature_importance_settings': {'key': 'featureImportanceSettings', 'type': 'FeatureImportanceSettings'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'FeatureAttributionMetricThreshold'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "feature_importance_settings": {"key": 
"featureImportanceSettings", "type": "FeatureImportanceSettings"}, + "metric_threshold": {"key": "metricThreshold", "type": "FeatureAttributionMetricThreshold"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. :paramtype notification_types: list[str or @@ -13746,12 +12974,12 @@ def __init__( :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ super(FeatureAttributionDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'FeatureAttributionDrift' # type: str - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.feature_importance_settings = kwargs['feature_importance_settings'] - self.metric_threshold = kwargs['metric_threshold'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] + self.signal_type = "FeatureAttributionDrift" # type: str + self.feature_data_type_override = kwargs.get("feature_data_type_override", None) + self.feature_importance_settings = kwargs["feature_importance_settings"] + self.metric_threshold = kwargs["metric_threshold"] + self.production_data = kwargs["production_data"] + self.reference_data = kwargs["reference_data"] class FeatureAttributionMetricThreshold(msrest.serialization.Model): @@ -13768,18 +12996,15 @@ class FeatureAttributionMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } - def __init__( - self, - 
**kwargs - ): + def __init__(self, **kwargs): """ :keyword metric: Required. [Required] The feature attribution metric to calculate. Possible values include: "NormalizedDiscountedCumulativeGain". @@ -13789,8 +13014,8 @@ def __init__( :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ super(FeatureAttributionMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) + self.metric = kwargs["metric"] + self.threshold = kwargs.get("threshold", None) class FeatureImportanceSettings(msrest.serialization.Model): @@ -13804,14 +13029,11 @@ class FeatureImportanceSettings(msrest.serialization.Model): """ _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'target_column': {'key': 'targetColumn', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "target_column": {"key": "targetColumn", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: The mode of operation for computing feature importance. Possible values include: "Disabled", "Enabled". 
@@ -13820,8 +13042,8 @@ def __init__( :paramtype target_column: str """ super(FeatureImportanceSettings, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.target_column = kwargs.get('target_column', None) + self.mode = kwargs.get("mode", None) + self.target_column = kwargs.get("target_column", None) class FeatureProperties(ResourceBase): @@ -13841,17 +13063,14 @@ class FeatureProperties(ResourceBase): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'feature_name': {'key': 'featureName', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "data_type": {"key": "dataType", "type": "str"}, + "feature_name": {"key": "featureName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -13866,8 +13085,8 @@ def __init__( :paramtype feature_name: str """ super(FeatureProperties, self).__init__(**kwargs) - self.data_type = kwargs.get('data_type', None) - self.feature_name = kwargs.get('feature_name', None) + self.data_type = kwargs.get("data_type", None) + self.feature_name = kwargs.get("feature_name", None) class FeatureResourceArmPaginatedResult(msrest.serialization.Model): @@ -13881,14 +13100,11 @@ class FeatureResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Feature]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Feature]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of Feature objects. 
If null, there are no additional pages. @@ -13897,8 +13113,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.Feature] """ super(FeatureResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class FeaturesetContainer(ProxyResource): @@ -13924,31 +13140,28 @@ class FeaturesetContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturesetContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties """ super(FeaturesetContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class FeaturesetContainerProperties(AssetContainer): @@ -13975,25 +13188,22 @@ class FeaturesetContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -14019,14 +13229,11 @@ class FeaturesetContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of FeaturesetContainer objects. If null, there are no additional pages. @@ -14035,8 +13242,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] """ super(FeaturesetContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class FeaturesetSpecification(msrest.serialization.Model): @@ -14047,19 +13254,16 @@ class FeaturesetSpecification(msrest.serialization.Model): """ _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword path: Specifies the spec path. 
:paramtype path: str """ super(FeaturesetSpecification, self).__init__(**kwargs) - self.path = kwargs.get('path', None) + self.path = kwargs.get("path", None) class FeaturesetVersion(ProxyResource): @@ -14085,31 +13289,28 @@ class FeaturesetVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturesetVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties """ super(FeaturesetVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class FeaturesetVersionBackfillRequest(msrest.serialization.Model): @@ -14138,21 +13339,18 @@ class FeaturesetVersionBackfillRequest(msrest.serialization.Model): """ _attribute_map = { - 'data_availability_status': {'key': 'dataAvailabilityStatus', 'type': '[str]'}, - 'description': {'key': 'description', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'feature_window': {'key': 'featureWindow', 'type': 'FeatureWindow'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "data_availability_status": {"key": "dataAvailabilityStatus", "type": "[str]"}, + "description": {"key": "description", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "feature_window": {"key": "featureWindow", "type": "FeatureWindow"}, + "job_id": {"key": "jobId", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "resource": {"key": "resource", "type": "MaterializationComputeResource"}, + "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword data_availability_status: Specified the data availability status that you want to backfill. 
@@ -14176,15 +13374,15 @@ def __init__( :paramtype tags: dict[str, str] """ super(FeaturesetVersionBackfillRequest, self).__init__(**kwargs) - self.data_availability_status = kwargs.get('data_availability_status', None) - self.description = kwargs.get('description', None) - self.display_name = kwargs.get('display_name', None) - self.feature_window = kwargs.get('feature_window', None) - self.job_id = kwargs.get('job_id', None) - self.properties = kwargs.get('properties', None) - self.resource = kwargs.get('resource', None) - self.spark_configuration = kwargs.get('spark_configuration', None) - self.tags = kwargs.get('tags', None) + self.data_availability_status = kwargs.get("data_availability_status", None) + self.description = kwargs.get("description", None) + self.display_name = kwargs.get("display_name", None) + self.feature_window = kwargs.get("feature_window", None) + self.job_id = kwargs.get("job_id", None) + self.properties = kwargs.get("properties", None) + self.resource = kwargs.get("resource", None) + self.spark_configuration = kwargs.get("spark_configuration", None) + self.tags = kwargs.get("tags", None) class FeaturesetVersionBackfillResponse(msrest.serialization.Model): @@ -14195,19 +13393,16 @@ class FeaturesetVersionBackfillResponse(msrest.serialization.Model): """ _attribute_map = { - 'job_ids': {'key': 'jobIds', 'type': '[str]'}, + "job_ids": {"key": "jobIds", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword job_ids: List of jobs submitted as part of the backfill request. 
:paramtype job_ids: list[str] """ super(FeaturesetVersionBackfillResponse, self).__init__(**kwargs) - self.job_ids = kwargs.get('job_ids', None) + self.job_ids = kwargs.get("job_ids", None) class FeaturesetVersionProperties(AssetBase): @@ -14245,27 +13440,24 @@ class FeaturesetVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'entities': {'key': 'entities', 'type': '[str]'}, - 'materialization_settings': {'key': 'materializationSettings', 'type': 'MaterializationSettings'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'specification': {'key': 'specification', 'type': 'FeaturesetSpecification'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "entities": {"key": "entities", "type": "[str]"}, + "materialization_settings": {"key": "materializationSettings", "type": "MaterializationSettings"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "specification": {"key": "specification", "type": "FeaturesetSpecification"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -14292,11 +13484,11 @@ def __init__( :paramtype stage: str """ super(FeaturesetVersionProperties, self).__init__(**kwargs) - self.entities = kwargs.get('entities', None) - self.materialization_settings = kwargs.get('materialization_settings', None) + self.entities = kwargs.get("entities", None) + self.materialization_settings = kwargs.get("materialization_settings", None) self.provisioning_state = None - self.specification = kwargs.get('specification', None) - self.stage = kwargs.get('stage', None) + self.specification = kwargs.get("specification", None) + self.stage = kwargs.get("stage", None) class FeaturesetVersionResourceArmPaginatedResult(msrest.serialization.Model): @@ -14310,14 +13502,11 @@ class FeaturesetVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of FeaturesetVersion objects. If null, there are no additional pages. 
@@ -14326,8 +13515,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] """ super(FeaturesetVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class FeaturestoreEntityContainer(ProxyResource): @@ -14354,32 +13543,29 @@ class FeaturestoreEntityContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturestoreEntityContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties """ super(FeaturestoreEntityContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class FeaturestoreEntityContainerProperties(AssetContainer): @@ -14406,25 +13592,22 @@ class FeaturestoreEntityContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -14450,14 +13633,11 @@ class FeaturestoreEntityContainerResourceArmPaginatedResult(msrest.serialization """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, there are no additional pages. @@ -14466,8 +13646,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] """ super(FeaturestoreEntityContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class FeaturestoreEntityVersion(ProxyResource): @@ -14494,32 +13674,29 @@ class FeaturestoreEntityVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": 
"FeaturestoreEntityVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties """ super(FeaturestoreEntityVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class FeaturestoreEntityVersionProperties(AssetBase): @@ -14552,25 +13729,22 @@ class FeaturestoreEntityVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'index_columns': {'key': 'indexColumns', 'type': '[IndexColumn]'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "index_columns": {"key": "indexColumns", "type": "[IndexColumn]"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -14592,9 +13766,9 @@ def __init__( :paramtype stage: str """ super(FeaturestoreEntityVersionProperties, self).__init__(**kwargs) - self.index_columns = kwargs.get('index_columns', None) + self.index_columns = kwargs.get("index_columns", None) self.provisioning_state = None - self.stage = kwargs.get('stage', None) + self.stage = kwargs.get("stage", None) class FeaturestoreEntityVersionResourceArmPaginatedResult(msrest.serialization.Model): @@ -14608,14 +13782,11 @@ class FeaturestoreEntityVersionResourceArmPaginatedResult(msrest.serialization.M """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, there are no additional pages. 
@@ -14624,8 +13795,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] """ super(FeaturestoreEntityVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class FeatureStoreSettings(msrest.serialization.Model): @@ -14640,15 +13811,12 @@ class FeatureStoreSettings(msrest.serialization.Model): """ _attribute_map = { - 'compute_runtime': {'key': 'computeRuntime', 'type': 'ComputeRuntimeDto'}, - 'offline_store_connection_name': {'key': 'offlineStoreConnectionName', 'type': 'str'}, - 'online_store_connection_name': {'key': 'onlineStoreConnectionName', 'type': 'str'}, + "compute_runtime": {"key": "computeRuntime", "type": "ComputeRuntimeDto"}, + "offline_store_connection_name": {"key": "offlineStoreConnectionName", "type": "str"}, + "online_store_connection_name": {"key": "onlineStoreConnectionName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword compute_runtime: :paramtype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto @@ -14658,9 +13826,9 @@ def __init__( :paramtype online_store_connection_name: str """ super(FeatureStoreSettings, self).__init__(**kwargs) - self.compute_runtime = kwargs.get('compute_runtime', None) - self.offline_store_connection_name = kwargs.get('offline_store_connection_name', None) - self.online_store_connection_name = kwargs.get('online_store_connection_name', None) + self.compute_runtime = kwargs.get("compute_runtime", None) + self.offline_store_connection_name = kwargs.get("offline_store_connection_name", None) + self.online_store_connection_name = kwargs.get("online_store_connection_name", None) class FeatureSubset(MonitoringFeatureFilterBase): @@ -14678,26 +13846,23 @@ class FeatureSubset(MonitoringFeatureFilterBase): 
""" _validation = { - 'filter_type': {'required': True}, - 'features': {'required': True}, + "filter_type": {"required": True}, + "features": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'features': {'key': 'features', 'type': '[str]'}, + "filter_type": {"key": "filterType", "type": "str"}, + "features": {"key": "features", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword features: Required. [Required] The list of features to include. :paramtype features: list[str] """ super(FeatureSubset, self).__init__(**kwargs) - self.filter_type = 'FeatureSubset' # type: str - self.features = kwargs['features'] + self.filter_type = "FeatureSubset" # type: str + self.features = kwargs["features"] class FeatureWindow(msrest.serialization.Model): @@ -14710,14 +13875,11 @@ class FeatureWindow(msrest.serialization.Model): """ _attribute_map = { - 'feature_window_end': {'key': 'featureWindowEnd', 'type': 'iso-8601'}, - 'feature_window_start': {'key': 'featureWindowStart', 'type': 'iso-8601'}, + "feature_window_end": {"key": "featureWindowEnd", "type": "iso-8601"}, + "feature_window_start": {"key": "featureWindowStart", "type": "iso-8601"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword feature_window_end: Specifies the feature window end time. 
:paramtype feature_window_end: ~datetime.datetime @@ -14725,8 +13887,8 @@ def __init__( :paramtype feature_window_start: ~datetime.datetime """ super(FeatureWindow, self).__init__(**kwargs) - self.feature_window_end = kwargs.get('feature_window_end', None) - self.feature_window_start = kwargs.get('feature_window_start', None) + self.feature_window_end = kwargs.get("feature_window_end", None) + self.feature_window_start = kwargs.get("feature_window_start", None) class FeaturizationSettings(msrest.serialization.Model): @@ -14737,19 +13899,16 @@ class FeaturizationSettings(msrest.serialization.Model): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword dataset_language: Dataset language, useful for the text data. :paramtype dataset_language: str """ super(FeaturizationSettings, self).__init__(**kwargs) - self.dataset_language = kwargs.get('dataset_language', None) + self.dataset_language = kwargs.get("dataset_language", None) class FileSystemSource(DataImportSource): @@ -14767,19 +13926,16 @@ class FileSystemSource(DataImportSource): """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword connection: Workspace connection for data import source storage. 
:paramtype connection: str @@ -14787,8 +13943,8 @@ def __init__( :paramtype path: str """ super(FileSystemSource, self).__init__(**kwargs) - self.source_type = 'file_system' # type: str - self.path = kwargs.get('path', None) + self.source_type = "file_system" # type: str + self.path = kwargs.get("path", None) class FineTuningJob(JobBaseProperties): @@ -14842,35 +13998,32 @@ class FineTuningJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'fine_tuning_details': {'required': True}, - 'outputs': {'required': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "fine_tuning_details": {"required": True}, + "outputs": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'fine_tuning_details': {'key': 'fineTuningDetails', 'type': 'FineTuningVertical'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, 
+ "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "fine_tuning_details": {"key": "fineTuningDetails", "type": "FineTuningVertical"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -14907,9 +14060,9 @@ def __init__( :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] """ super(FineTuningJob, self).__init__(**kwargs) - self.job_type = 'FineTuning' # type: str - self.fine_tuning_details = kwargs['fine_tuning_details'] - self.outputs = kwargs['outputs'] + self.job_type = "FineTuning" # type: str + self.fine_tuning_details = kwargs["fine_tuning_details"] + self.outputs = kwargs["outputs"] class MonitoringInputDataBase(msrest.serialization.Model): @@ -14936,27 +14089,24 @@ class MonitoringInputDataBase(msrest.serialization.Model): """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 
'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } _subtype_map = { - 'input_data_type': {'Fixed': 'FixedInputData', 'Rolling': 'RollingInputData', 'Static': 'StaticInputData'} + "input_data_type": {"Fixed": "FixedInputData", "Rolling": "RollingInputData", "Static": "StaticInputData"} } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] @@ -14970,11 +14120,11 @@ def __init__( :paramtype uri: str """ super(MonitoringInputDataBase, self).__init__(**kwargs) - self.columns = kwargs.get('columns', None) - self.data_context = kwargs.get('data_context', None) + self.columns = kwargs.get("columns", None) + self.data_context = kwargs.get("data_context", None) self.input_data_type = None # type: Optional[str] - self.job_input_type = kwargs['job_input_type'] - self.uri = kwargs['uri'] + self.job_input_type = kwargs["job_input_type"] + self.uri = kwargs["uri"] class FixedInputData(MonitoringInputDataBase): @@ -14998,23 +14148,20 @@ class FixedInputData(MonitoringInputDataBase): """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 
'uri': {'key': 'uri', 'type': 'str'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] @@ -15028,7 +14175,7 @@ def __init__( :paramtype uri: str """ super(FixedInputData, self).__init__(**kwargs) - self.input_data_type = 'Fixed' # type: str + self.input_data_type = "Fixed" # type: str class FlavorData(msrest.serialization.Model): @@ -15039,19 +14186,16 @@ class FlavorData(msrest.serialization.Model): """ _attribute_map = { - 'data': {'key': 'data', 'type': '{str}'}, + "data": {"key": "data", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword data: Model flavor-specific data. 
:paramtype data: dict[str, str] """ super(FlavorData, self).__init__(**kwargs) - self.data = kwargs.get('data', None) + self.data = kwargs.get("data", None) class Forecasting(AutoMLVertical, TableVertical): @@ -15120,36 +14264,33 @@ class Forecasting(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ForecastingTrainingSettings'}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": 
{"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "forecasting_settings": {"key": "forecastingSettings", "type": "ForecastingSettings"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ForecastingTrainingSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cv_split_column_names: Columns to use for CVSplit data. 
:paramtype cv_split_column_names: list[str] @@ -15209,25 +14350,25 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings """ super(Forecasting, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Forecasting' # type: str - self.forecasting_settings = kwargs.get('forecasting_settings', None) - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.cv_split_column_names = kwargs.get("cv_split_column_names", None) + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.n_cross_validations = kwargs.get("n_cross_validations", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.test_data = kwargs.get("test_data", None) + self.test_data_size = kwargs.get("test_data_size", None) + self.validation_data = kwargs.get("validation_data", None) + 
self.validation_data_size = kwargs.get("validation_data_size", None) + self.weight_column_name = kwargs.get("weight_column_name", None) + self.task_type = "Forecasting" # type: str + self.forecasting_settings = kwargs.get("forecasting_settings", None) + self.primary_metric = kwargs.get("primary_metric", None) + self.training_settings = kwargs.get("training_settings", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ForecastingSettings(msrest.serialization.Model): @@ -15290,26 +14431,23 @@ class ForecastingSettings(msrest.serialization.Model): """ _attribute_map = { - 'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'}, - 'cv_step_size': {'key': 'cvStepSize', 'type': 'int'}, - 'feature_lags': {'key': 'featureLags', 'type': 'str'}, - 'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'}, - 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'seasonality': {'key': 'seasonality', 'type': 'Seasonality'}, - 'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'}, - 'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'}, - 'target_lags': {'key': 'targetLags', 'type': 'TargetLags'}, - 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'}, - 'time_column_name': {'key': 'timeColumnName', 'type': 'str'}, - 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'}, - 'use_stl': {'key': 'useStl', 'type': 'str'}, + "country_or_region_for_holidays": {"key": "countryOrRegionForHolidays", "type": "str"}, + "cv_step_size": {"key": "cvStepSize", "type": "int"}, + "feature_lags": {"key": "featureLags", "type": "str"}, + "features_unknown_at_forecast_time": {"key": 
"featuresUnknownAtForecastTime", "type": "[str]"}, + "forecast_horizon": {"key": "forecastHorizon", "type": "ForecastHorizon"}, + "frequency": {"key": "frequency", "type": "str"}, + "seasonality": {"key": "seasonality", "type": "Seasonality"}, + "short_series_handling_config": {"key": "shortSeriesHandlingConfig", "type": "str"}, + "target_aggregate_function": {"key": "targetAggregateFunction", "type": "str"}, + "target_lags": {"key": "targetLags", "type": "TargetLags"}, + "target_rolling_window_size": {"key": "targetRollingWindowSize", "type": "TargetRollingWindowSize"}, + "time_column_name": {"key": "timeColumnName", "type": "str"}, + "time_series_id_column_names": {"key": "timeSeriesIdColumnNames", "type": "[str]"}, + "use_stl": {"key": "useStl", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. 
@@ -15369,20 +14507,20 @@ def __init__( :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl """ super(ForecastingSettings, self).__init__(**kwargs) - self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None) - self.cv_step_size = kwargs.get('cv_step_size', None) - self.feature_lags = kwargs.get('feature_lags', None) - self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None) - self.forecast_horizon = kwargs.get('forecast_horizon', None) - self.frequency = kwargs.get('frequency', None) - self.seasonality = kwargs.get('seasonality', None) - self.short_series_handling_config = kwargs.get('short_series_handling_config', None) - self.target_aggregate_function = kwargs.get('target_aggregate_function', None) - self.target_lags = kwargs.get('target_lags', None) - self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None) - self.time_column_name = kwargs.get('time_column_name', None) - self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None) - self.use_stl = kwargs.get('use_stl', None) + self.country_or_region_for_holidays = kwargs.get("country_or_region_for_holidays", None) + self.cv_step_size = kwargs.get("cv_step_size", None) + self.feature_lags = kwargs.get("feature_lags", None) + self.features_unknown_at_forecast_time = kwargs.get("features_unknown_at_forecast_time", None) + self.forecast_horizon = kwargs.get("forecast_horizon", None) + self.frequency = kwargs.get("frequency", None) + self.seasonality = kwargs.get("seasonality", None) + self.short_series_handling_config = kwargs.get("short_series_handling_config", None) + self.target_aggregate_function = kwargs.get("target_aggregate_function", None) + self.target_lags = kwargs.get("target_lags", None) + self.target_rolling_window_size = kwargs.get("target_rolling_window_size", None) + self.time_column_name = kwargs.get("time_column_name", None) + self.time_series_id_column_names = 
kwargs.get("time_series_id_column_names", None) + self.use_stl = kwargs.get("use_stl", None) class ForecastingTrainingSettings(TrainingSettings): @@ -15422,22 +14560,19 @@ class ForecastingTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword 
enable_dnn_training: Enable recommendation of DNN models. :paramtype enable_dnn_training: bool @@ -15472,8 +14607,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ForecastingModels] """ super(ForecastingTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) + self.allowed_training_algorithms = kwargs.get("allowed_training_algorithms", None) + self.blocked_training_algorithms = kwargs.get("blocked_training_algorithms", None) class FQDNEndpoint(msrest.serialization.Model): @@ -15486,14 +14621,11 @@ class FQDNEndpoint(msrest.serialization.Model): """ _attribute_map = { - 'domain_name': {'key': 'domainName', 'type': 'str'}, - 'endpoint_details': {'key': 'endpointDetails', 'type': '[FQDNEndpointDetail]'}, + "domain_name": {"key": "domainName", "type": "str"}, + "endpoint_details": {"key": "endpointDetails", "type": "[FQDNEndpointDetail]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword domain_name: :paramtype domain_name: str @@ -15502,8 +14634,8 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] """ super(FQDNEndpoint, self).__init__(**kwargs) - self.domain_name = kwargs.get('domain_name', None) - self.endpoint_details = kwargs.get('endpoint_details', None) + self.domain_name = kwargs.get("domain_name", None) + self.endpoint_details = kwargs.get("endpoint_details", None) class FQDNEndpointDetail(msrest.serialization.Model): @@ -15514,19 +14646,16 @@ class FQDNEndpointDetail(msrest.serialization.Model): """ _attribute_map = { - 'port': {'key': 'port', 'type': 'int'}, + "port": {"key": "port", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword port: :paramtype port: int """ super(FQDNEndpointDetail, self).__init__(**kwargs) - self.port = kwargs.get('port', None) + self.port = 
kwargs.get("port", None) class FQDNEndpoints(msrest.serialization.Model): @@ -15539,14 +14668,11 @@ class FQDNEndpoints(msrest.serialization.Model): """ _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'endpoints': {'key': 'endpoints', 'type': '[FQDNEndpoint]'}, + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[FQDNEndpoint]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: :paramtype category: str @@ -15554,8 +14680,8 @@ def __init__( :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] """ super(FQDNEndpoints, self).__init__(**kwargs) - self.category = kwargs.get('category', None) - self.endpoints = kwargs.get('endpoints', None) + self.category = kwargs.get("category", None) + self.endpoints = kwargs.get("endpoints", None) class FQDNEndpointsPropertyBag(msrest.serialization.Model): @@ -15566,19 +14692,16 @@ class FQDNEndpointsPropertyBag(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'FQDNEndpoints'}, + "properties": {"key": "properties", "type": "FQDNEndpoints"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints """ super(FQDNEndpointsPropertyBag, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class OutboundRule(msrest.serialization.Model): @@ -15602,24 +14725,24 @@ class OutboundRule(msrest.serialization.Model): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": 
"type", "type": "str"}, } _subtype_map = { - 'type': {'FQDN': 'FqdnOutboundRule', 'PrivateEndpoint': 'PrivateEndpointOutboundRule', - 'ServiceTag': 'ServiceTagOutboundRule'} + "type": { + "FQDN": "FqdnOutboundRule", + "PrivateEndpoint": "PrivateEndpointOutboundRule", + "ServiceTag": "ServiceTagOutboundRule", + } } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. Possible values include: "Required", "Recommended", "UserDefined". @@ -15629,8 +14752,8 @@ def __init__( :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus """ super(OutboundRule, self).__init__(**kwargs) - self.category = kwargs.get('category', None) - self.status = kwargs.get('status', None) + self.category = kwargs.get("category", None) + self.status = kwargs.get("status", None) self.type = None # type: Optional[str] @@ -15654,20 +14777,17 @@ class FqdnOutboundRule(OutboundRule): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'str'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. Possible values include: "Required", "Recommended", "UserDefined". 
@@ -15679,8 +14799,8 @@ def __init__( :paramtype destination: str """ super(FqdnOutboundRule, self).__init__(**kwargs) - self.type = 'FQDN' # type: str - self.destination = kwargs.get('destination', None) + self.type = "FQDN" # type: str + self.destination = kwargs.get("destination", None) class GenerationSafetyQualityMetricThreshold(msrest.serialization.Model): @@ -15703,18 +14823,15 @@ class GenerationSafetyQualityMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. Possible values include: "AcceptableGroundednessScorePerInstance", @@ -15730,8 +14847,8 @@ def __init__( :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ super(GenerationSafetyQualityMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) + self.metric = kwargs["metric"] + self.threshold = kwargs.get("threshold", None) class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): @@ -15765,25 +14882,22 @@ class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 
'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationSafetyQualityMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, - 'workspace_connection_id': {'key': 'workspaceConnectionId', 'type': 'str'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationSafetyQualityMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, + "workspace_connection_id": {"key": "workspaceConnectionId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. 
:paramtype notification_types: list[str or @@ -15805,11 +14919,11 @@ def __init__( :paramtype workspace_connection_id: str """ super(GenerationSafetyQualityMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'GenerationSafetyQuality' # type: str - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs.get('production_data', None) - self.sampling_rate = kwargs['sampling_rate'] - self.workspace_connection_id = kwargs.get('workspace_connection_id', None) + self.signal_type = "GenerationSafetyQuality" # type: str + self.metric_thresholds = kwargs["metric_thresholds"] + self.production_data = kwargs.get("production_data", None) + self.sampling_rate = kwargs["sampling_rate"] + self.workspace_connection_id = kwargs.get("workspace_connection_id", None) class GenerationTokenUsageMetricThreshold(msrest.serialization.Model): @@ -15826,18 +14940,15 @@ class GenerationTokenUsageMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. Possible values include: "TotalTokenCount", "TotalTokenCountPerGroup". 
@@ -15847,8 +14958,8 @@ def __init__( :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ super(GenerationTokenUsageMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) + self.metric = kwargs["metric"] + self.threshold = kwargs.get("threshold", None) class GenerationTokenUsageSignal(MonitoringSignalBase): @@ -15879,24 +14990,21 @@ class GenerationTokenUsageSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationTokenUsageMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationTokenUsageMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. 
:paramtype notification_types: list[str or @@ -15915,10 +15023,10 @@ def __init__( :paramtype sampling_rate: float """ super(GenerationTokenUsageSignal, self).__init__(**kwargs) - self.signal_type = 'GenerationTokenStatistics' # type: str - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs.get('production_data', None) - self.sampling_rate = kwargs['sampling_rate'] + self.signal_type = "GenerationTokenStatistics" # type: str + self.metric_thresholds = kwargs["metric_thresholds"] + self.production_data = kwargs.get("production_data", None) + self.sampling_rate = kwargs["sampling_rate"] class GetBlobReferenceForConsumptionDto(msrest.serialization.Model): @@ -15933,15 +15041,12 @@ class GetBlobReferenceForConsumptionDto(msrest.serialization.Model): """ _attribute_map = { - 'blob_uri': {'key': 'blobUri', 'type': 'str'}, - 'credential': {'key': 'credential', 'type': 'DataReferenceCredential'}, - 'storage_account_arm_id': {'key': 'storageAccountArmId', 'type': 'str'}, + "blob_uri": {"key": "blobUri", "type": "str"}, + "credential": {"key": "credential", "type": "DataReferenceCredential"}, + "storage_account_arm_id": {"key": "storageAccountArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword blob_uri: Blob uri, example: https://blob.windows.core.net/Container/Path. 
:paramtype blob_uri: str @@ -15951,9 +15056,9 @@ def __init__( :paramtype storage_account_arm_id: str """ super(GetBlobReferenceForConsumptionDto, self).__init__(**kwargs) - self.blob_uri = kwargs.get('blob_uri', None) - self.credential = kwargs.get('credential', None) - self.storage_account_arm_id = kwargs.get('storage_account_arm_id', None) + self.blob_uri = kwargs.get("blob_uri", None) + self.credential = kwargs.get("credential", None) + self.storage_account_arm_id = kwargs.get("storage_account_arm_id", None) class GetBlobReferenceSASRequestDto(msrest.serialization.Model): @@ -15966,14 +15071,11 @@ class GetBlobReferenceSASRequestDto(msrest.serialization.Model): """ _attribute_map = { - 'asset_id': {'key': 'assetId', 'type': 'str'}, - 'blob_uri': {'key': 'blobUri', 'type': 'str'}, + "asset_id": {"key": "assetId", "type": "str"}, + "blob_uri": {"key": "blobUri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_id: Id of the asset to be accessed. :paramtype asset_id: str @@ -15981,8 +15083,8 @@ def __init__( :paramtype blob_uri: str """ super(GetBlobReferenceSASRequestDto, self).__init__(**kwargs) - self.asset_id = kwargs.get('asset_id', None) - self.blob_uri = kwargs.get('blob_uri', None) + self.asset_id = kwargs.get("asset_id", None) + self.blob_uri = kwargs.get("blob_uri", None) class GetBlobReferenceSASResponseDto(msrest.serialization.Model): @@ -15994,21 +15096,20 @@ class GetBlobReferenceSASResponseDto(msrest.serialization.Model): """ _attribute_map = { - 'blob_reference_for_consumption': {'key': 'blobReferenceForConsumption', - 'type': 'GetBlobReferenceForConsumptionDto'}, + "blob_reference_for_consumption": { + "key": "blobReferenceForConsumption", + "type": "GetBlobReferenceForConsumptionDto", + }, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword blob_reference_for_consumption: Blob reference for consumption details. 
:paramtype blob_reference_for_consumption: ~azure.mgmt.machinelearningservices.models.GetBlobReferenceForConsumptionDto """ super(GetBlobReferenceSASResponseDto, self).__init__(**kwargs) - self.blob_reference_for_consumption = kwargs.get('blob_reference_for_consumption', None) + self.blob_reference_for_consumption = kwargs.get("blob_reference_for_consumption", None) class GridSamplingAlgorithm(SamplingAlgorithm): @@ -16024,21 +15125,17 @@ class GridSamplingAlgorithm(SamplingAlgorithm): """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(GridSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Grid' # type: str + self.sampling_algorithm_type = "Grid" # type: str class GroupStatus(msrest.serialization.Model): @@ -16055,16 +15152,13 @@ class GroupStatus(msrest.serialization.Model): """ _attribute_map = { - 'actual_capacity_info': {'key': 'actualCapacityInfo', 'type': 'ActualCapacityInfo'}, - 'bonus_extra_capacity': {'key': 'bonusExtraCapacity', 'type': 'int'}, - 'endpoint_count': {'key': 'endpointCount', 'type': 'int'}, - 'requested_capacity': {'key': 'requestedCapacity', 'type': 'int'}, + "actual_capacity_info": {"key": "actualCapacityInfo", "type": "ActualCapacityInfo"}, + "bonus_extra_capacity": {"key": "bonusExtraCapacity", "type": "int"}, + "endpoint_count": {"key": "endpointCount", "type": "int"}, + "requested_capacity": {"key": "requestedCapacity", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword actual_capacity_info: Gets or sets the actual capacity info for the group. 
:paramtype actual_capacity_info: ~azure.mgmt.machinelearningservices.models.ActualCapacityInfo @@ -16076,10 +15170,10 @@ def __init__( :paramtype requested_capacity: int """ super(GroupStatus, self).__init__(**kwargs) - self.actual_capacity_info = kwargs.get('actual_capacity_info', None) - self.bonus_extra_capacity = kwargs.get('bonus_extra_capacity', 0) - self.endpoint_count = kwargs.get('endpoint_count', 0) - self.requested_capacity = kwargs.get('requested_capacity', 0) + self.actual_capacity_info = kwargs.get("actual_capacity_info", None) + self.bonus_extra_capacity = kwargs.get("bonus_extra_capacity", 0) + self.endpoint_count = kwargs.get("endpoint_count", 0) + self.requested_capacity = kwargs.get("requested_capacity", 0) class HdfsDatastore(DatastoreProperties): @@ -16116,29 +15210,26 @@ class HdfsDatastore(DatastoreProperties): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'name_node_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "name_node_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'hdfs_server_certificate': {'key': 'hdfsServerCertificate', 'type': 'str'}, - 'name_node_address': {'key': 'nameNodeAddress', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": 
"properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "hdfs_server_certificate": {"key": "hdfsServerCertificate", "type": "str"}, + "name_node_address": {"key": "nameNodeAddress", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -16160,10 +15251,10 @@ def __init__( :paramtype protocol: str """ super(HdfsDatastore, self).__init__(**kwargs) - self.datastore_type = 'Hdfs' # type: str - self.hdfs_server_certificate = kwargs.get('hdfs_server_certificate', None) - self.name_node_address = kwargs['name_node_address'] - self.protocol = kwargs.get('protocol', "http") + self.datastore_type = "Hdfs" # type: str + self.hdfs_server_certificate = kwargs.get("hdfs_server_certificate", None) + self.name_node_address = kwargs["name_node_address"] + self.protocol = kwargs.get("protocol", "http") class HDInsightSchema(msrest.serialization.Model): @@ -16174,19 +15265,16 @@ class HDInsightSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + "properties": {"key": "properties", "type": "HDInsightProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: HDInsight compute properties. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties """ super(HDInsightSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class HDInsight(Compute, HDInsightSchema): @@ -16228,32 +15316,29 @@ class HDInsight(Compute, HDInsightSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "HDInsightProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": 
"modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: HDInsight compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties @@ -16268,17 +15353,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(HDInsight, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'HDInsight' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "HDInsight" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class HDInsightProperties(msrest.serialization.Model): @@ -16294,15 +15379,12 @@ class HDInsightProperties(msrest.serialization.Model): """ _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": 
"VirtualMachineSshCredentials"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword ssh_port: Port open for ssh connections on the master node of the cluster. :paramtype ssh_port: int @@ -16313,9 +15395,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) + self.ssh_port = kwargs.get("ssh_port", None) + self.address = kwargs.get("address", None) + self.administrator_account = kwargs.get("administrator_account", None) class IdAssetReference(AssetReferenceBase): @@ -16331,26 +15413,23 @@ class IdAssetReference(AssetReferenceBase): """ _validation = { - 'reference_type': {'required': True}, - 'asset_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "reference_type": {"required": True}, + "asset_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'asset_id': {'key': 'assetId', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "asset_id": {"key": "assetId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_id: Required. [Required] ARM resource ID of the asset. 
:paramtype asset_id: str """ super(IdAssetReference, self).__init__(**kwargs) - self.reference_type = 'Id' # type: str - self.asset_id = kwargs['asset_id'] + self.reference_type = "Id" # type: str + self.asset_id = kwargs["asset_id"] class IdentityForCmk(msrest.serialization.Model): @@ -16362,20 +15441,17 @@ class IdentityForCmk(msrest.serialization.Model): """ _attribute_map = { - 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key from keyVault. :paramtype user_assigned_identity: str """ super(IdentityForCmk, self).__init__(**kwargs) - self.user_assigned_identity = kwargs.get('user_assigned_identity', None) + self.user_assigned_identity = kwargs.get("user_assigned_identity", None) class IdleShutdownSetting(msrest.serialization.Model): @@ -16387,20 +15463,17 @@ class IdleShutdownSetting(msrest.serialization.Model): """ _attribute_map = { - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. 
:paramtype idle_time_before_shutdown: str """ super(IdleShutdownSetting, self).__init__(**kwargs) - self.idle_time_before_shutdown = kwargs.get('idle_time_before_shutdown', None) + self.idle_time_before_shutdown = kwargs.get("idle_time_before_shutdown", None) class Image(msrest.serialization.Model): @@ -16420,16 +15493,13 @@ class Image(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference': {'key': 'reference', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference": {"key": "reference", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -16444,46 +15514,43 @@ def __init__( :paramtype version: str """ super(Image, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', "docker") - self.reference = kwargs.get('reference', None) - self.version = kwargs.get('version', None) + self.additional_properties = kwargs.get("additional_properties", None) + self.type = kwargs.get("type", "docker") + self.reference = kwargs.get("reference", None) + self.version = kwargs.get("version", None) class ImageVertical(msrest.serialization.Model): """Abstract class for AutoML tasks that train image (computer vision) models - -such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. + such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. 
+ :vartype validation_data_size: float """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16498,10 +15565,10 @@ def __init__( :paramtype validation_data_size: float """ super(ImageVertical, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) + self.limit_settings = kwargs["limit_settings"] + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) class ImageClassificationBase(ImageVertical): @@ -16530,22 +15597,19 @@ class ImageClassificationBase(ImageVertical): """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 
'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16567,78 +15631,75 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] """ super(ImageClassificationBase, self).__init__(**kwargs) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = kwargs.get("search_space", None) class ImageClassification(AutoMLVertical, ImageClassificationBase): """Image Classification. Multi-class image classification is used when an image is classified with only a single label -from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. + from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". 
- :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. 
+ Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Possible values include: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "limit_settings": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 
'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. 
:paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16673,87 +15734,84 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ super(ImageClassification, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageClassification' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.limit_settings = kwargs["limit_settings"] + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = kwargs.get("search_space", None) + self.task_type = "ImageClassification" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ImageClassificationMultilabel(AutoMLVertical, ImageClassificationBase): """Image Classification Multilabel. Multi-label image classification is used when an image could have one or more labels -from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". 
- :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", "IOU". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. 
+ Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Possible values include: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", "IOU". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "limit_settings": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 
'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. 
:paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16788,17 +15846,17 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ super(ImageClassificationMultilabel, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageClassificationMultilabel' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.limit_settings = kwargs["limit_settings"] + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = kwargs.get("search_space", None) + self.task_type = "ImageClassificationMultilabel" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ImageObjectDetectionBase(ImageVertical): @@ -16827,22 +15885,19 @@ class ImageObjectDetectionBase(ImageVertical): """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 
'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16864,77 +15919,74 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] """ super(ImageObjectDetectionBase, self).__init__(**kwargs) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = kwargs.get("search_space", None) class ImageInstanceSegmentation(AutoMLVertical, ImageObjectDetectionBase): """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at the pixel level, -drawing a polygon around each object in the image. + drawing a polygon around each object in the image. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. 
- :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. 
- :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. 
+ Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Possible values include: + "MeanAveragePrecision". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "limit_settings": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": 
"MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -16968,17 +16020,17 @@ def __init__( ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics """ super(ImageInstanceSegmentation, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageInstanceSegmentation' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.limit_settings = kwargs["limit_settings"] + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = 
kwargs.get("search_space", None) + self.task_type = "ImageInstanceSegmentation" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ImageLimitSettings(msrest.serialization.Model): @@ -16993,15 +16045,12 @@ class ImageLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. :paramtype max_concurrent_trials: int @@ -17011,9 +16060,9 @@ def __init__( :paramtype timeout: ~datetime.timedelta """ super(ImageLimitSettings, self).__init__(**kwargs) - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1) - self.max_trials = kwargs.get('max_trials', 1) - self.timeout = kwargs.get('timeout', "P7D") + self.max_concurrent_trials = kwargs.get("max_concurrent_trials", 1) + self.max_trials = kwargs.get("max_trials", 1) + self.timeout = kwargs.get("timeout", "P7D") class ImageMetadata(msrest.serialization.Model): @@ -17034,20 +16083,17 @@ class ImageMetadata(msrest.serialization.Model): """ _validation = { - 'os_patching_status': {'readonly': True}, + "os_patching_status": {"readonly": True}, } _attribute_map = { - 'current_image_version': {'key': 'currentImageVersion', 'type': 'str'}, - 'latest_image_version': {'key': 'latestImageVersion', 'type': 'str'}, - 'is_latest_os_image_version': {'key': 'isLatestOsImageVersion', 'type': 'bool'}, - 
'os_patching_status': {'key': 'osPatchingStatus', 'type': 'OsPatchingStatus'}, + "current_image_version": {"key": "currentImageVersion", "type": "str"}, + "latest_image_version": {"key": "latestImageVersion", "type": "str"}, + "is_latest_os_image_version": {"key": "isLatestOsImageVersion", "type": "bool"}, + "os_patching_status": {"key": "osPatchingStatus", "type": "OsPatchingStatus"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword current_image_version: Specifies the current operating system image version this compute instance is running on. @@ -17059,144 +16105,141 @@ def __init__( :paramtype is_latest_os_image_version: bool """ super(ImageMetadata, self).__init__(**kwargs) - self.current_image_version = kwargs.get('current_image_version', None) - self.latest_image_version = kwargs.get('latest_image_version', None) - self.is_latest_os_image_version = kwargs.get('is_latest_os_image_version', None) + self.current_image_version = kwargs.get("current_image_version", None) + self.latest_image_version = kwargs.get("latest_image_version", None) + self.is_latest_os_image_version = kwargs.get("is_latest_os_image_version", None) self.os_patching_status = None class ImageModelDistributionSettings(msrest.serialization.Model): """Distribution expressions to sweep over values of model settings. 
-:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) -where distribution name can be: uniform, quniform, loguniform, etc -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. 
Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. 
- :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. 
- :vartype weight_decay: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate 
= "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) + where distribution name can be: uniform, quniform, loguniform, etc + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. 
+ :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. 
+ :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. 
+ :vartype weight_decay: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. 
:paramtype ams_gradient: str @@ -17280,183 +16323,180 @@ def __init__( :paramtype weight_decay: str """ super(ImageModelDistributionSettings, self).__init__(**kwargs) - self.ams_gradient = kwargs.get('ams_gradient', None) - self.augmentations = kwargs.get('augmentations', None) - self.beta1 = kwargs.get('beta1', None) - self.beta2 = kwargs.get('beta2', None) - self.distributed = kwargs.get('distributed', None) - self.early_stopping = kwargs.get('early_stopping', None) - self.early_stopping_delay = kwargs.get('early_stopping_delay', None) - self.early_stopping_patience = kwargs.get('early_stopping_patience', None) - self.enable_onnx_normalization = kwargs.get('enable_onnx_normalization', None) - self.evaluation_frequency = kwargs.get('evaluation_frequency', None) - self.gradient_accumulation_step = kwargs.get('gradient_accumulation_step', None) - self.layers_to_freeze = kwargs.get('layers_to_freeze', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.momentum = kwargs.get('momentum', None) - self.nesterov = kwargs.get('nesterov', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.number_of_workers = kwargs.get('number_of_workers', None) - self.optimizer = kwargs.get('optimizer', None) - self.random_seed = kwargs.get('random_seed', None) - self.step_lr_gamma = kwargs.get('step_lr_gamma', None) - self.step_lr_step_size = kwargs.get('step_lr_step_size', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_cosine_lr_cycles = kwargs.get('warmup_cosine_lr_cycles', None) - self.warmup_cosine_lr_warmup_epochs = kwargs.get('warmup_cosine_lr_warmup_epochs', None) - self.weight_decay = kwargs.get('weight_decay', None) + self.ams_gradient = kwargs.get("ams_gradient", None) + self.augmentations = 
kwargs.get("augmentations", None) + self.beta1 = kwargs.get("beta1", None) + self.beta2 = kwargs.get("beta2", None) + self.distributed = kwargs.get("distributed", None) + self.early_stopping = kwargs.get("early_stopping", None) + self.early_stopping_delay = kwargs.get("early_stopping_delay", None) + self.early_stopping_patience = kwargs.get("early_stopping_patience", None) + self.enable_onnx_normalization = kwargs.get("enable_onnx_normalization", None) + self.evaluation_frequency = kwargs.get("evaluation_frequency", None) + self.gradient_accumulation_step = kwargs.get("gradient_accumulation_step", None) + self.layers_to_freeze = kwargs.get("layers_to_freeze", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.learning_rate_scheduler = kwargs.get("learning_rate_scheduler", None) + self.model_name = kwargs.get("model_name", None) + self.momentum = kwargs.get("momentum", None) + self.nesterov = kwargs.get("nesterov", None) + self.number_of_epochs = kwargs.get("number_of_epochs", None) + self.number_of_workers = kwargs.get("number_of_workers", None) + self.optimizer = kwargs.get("optimizer", None) + self.random_seed = kwargs.get("random_seed", None) + self.step_lr_gamma = kwargs.get("step_lr_gamma", None) + self.step_lr_step_size = kwargs.get("step_lr_step_size", None) + self.training_batch_size = kwargs.get("training_batch_size", None) + self.validation_batch_size = kwargs.get("validation_batch_size", None) + self.warmup_cosine_lr_cycles = kwargs.get("warmup_cosine_lr_cycles", None) + self.warmup_cosine_lr_warmup_epochs = kwargs.get("warmup_cosine_lr_warmup_epochs", None) + self.weight_decay = kwargs.get("weight_decay", None) class ImageModelDistributionSettingsClassification(ImageModelDistributionSettings): """Distribution expressions to sweep over values of model settings. 
-:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. 
- :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. 
- :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. - :vartype training_crop_size: str - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: str - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: str - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. 
- :vartype weighted_loss: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'str'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'str'}, - 
'validation_resize_size': {'key': 'validationResizeSize', 'type': 'str'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. 
+ :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. + :vartype training_crop_size: str + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: str + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: str + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. 
+ :vartype weighted_loss: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "training_crop_size": {"key": "trainingCropSize", "type": "str"}, + "validation_crop_size": {"key": "validationCropSize", "type": "str"}, + 
"validation_resize_size": {"key": "validationResizeSize", "type": "str"}, + "weighted_loss": {"key": "weightedLoss", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :paramtype ams_gradient: str @@ -17553,207 +16593,204 @@ def __init__( :paramtype weighted_loss: str """ super(ImageModelDistributionSettingsClassification, self).__init__(**kwargs) - self.training_crop_size = kwargs.get('training_crop_size', None) - self.validation_crop_size = kwargs.get('validation_crop_size', None) - self.validation_resize_size = kwargs.get('validation_resize_size', None) - self.weighted_loss = kwargs.get('weighted_loss', None) + self.training_crop_size = kwargs.get("training_crop_size", None) + self.validation_crop_size = kwargs.get("validation_crop_size", None) + self.validation_resize_size = kwargs.get("validation_resize_size", None) + self.weighted_loss = kwargs.get("weighted_loss", None) class ImageModelDistributionSettingsObjectDetection(ImageModelDistributionSettings): """Distribution expressions to sweep over values of model settings. -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. 
- :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. 
Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. 
Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: str - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: str - :ivar image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype image_size: str - :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype max_size: str - :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype min_size: str - :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype model_size: str - :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype multi_scale: str - :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - float in the range [0, 1]. 
- :vartype nms_iou_threshold: str - :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not - be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_grid_size: str - :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float - in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_overlap_ratio: str - :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - NMS: Non-maximum suppression. - :vartype tile_predictions_nms_threshold: str - :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be - float in the range [0, 1]. - :vartype validation_iou_threshold: str - :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be - 'none', 'coco', 'voc', or 'coco_voc'. 
- :vartype validation_metric_type: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'str'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 
'type': 'str'}, - 'image_size': {'key': 'imageSize', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'str'}, - 'min_size': {'key': 'minSize', 'type': 'str'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'str'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'str'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'str'}, - 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'str'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'str'}, - 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. 
+ :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. 
+ :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. 
+ :vartype box_detections_per_image: str + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :vartype box_score_threshold: str + :ivar image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :vartype image_size: str + :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype max_size: str + :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype min_size: str + :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :vartype model_size: str + :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if no sufficient GPU memory. + Note: This settings is only supported for the 'yolov5' algorithm. + :vartype multi_scale: str + :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + float in the range [0, 1]. + :vartype nms_iou_threshold: str + :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not + be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This settings is not supported for the 'yolov5' algorithm. 
+ :vartype tile_grid_size: str + :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float + in the range [0, 1). + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype tile_overlap_ratio: str + :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/ inference. Must be float in the range [0, 1]. + Note: This settings is not supported for the 'yolov5' algorithm. + NMS: Non-maximum suppression. + :vartype tile_predictions_nms_threshold: str + :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be + float in the range [0, 1]. + :vartype validation_iou_threshold: str + :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be + 'none', 'coco', 'voc', or 'coco_voc'. + :vartype validation_metric_type: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": 
{"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "str"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "str"}, + "image_size": {"key": "imageSize", "type": "str"}, + "max_size": {"key": "maxSize", "type": "str"}, + "min_size": {"key": "minSize", "type": "str"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "str"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "str"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "str"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "str"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "str"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. 
:paramtype ams_gradient: str @@ -17889,155 +16926,152 @@ def __init__( :paramtype validation_metric_type: str """ super(ImageModelDistributionSettingsObjectDetection, self).__init__(**kwargs) - self.box_detections_per_image = kwargs.get('box_detections_per_image', None) - self.box_score_threshold = kwargs.get('box_score_threshold', None) - self.image_size = kwargs.get('image_size', None) - self.max_size = kwargs.get('max_size', None) - self.min_size = kwargs.get('min_size', None) - self.model_size = kwargs.get('model_size', None) - self.multi_scale = kwargs.get('multi_scale', None) - self.nms_iou_threshold = kwargs.get('nms_iou_threshold', None) - self.tile_grid_size = kwargs.get('tile_grid_size', None) - self.tile_overlap_ratio = kwargs.get('tile_overlap_ratio', None) - self.tile_predictions_nms_threshold = kwargs.get('tile_predictions_nms_threshold', None) - self.validation_iou_threshold = kwargs.get('validation_iou_threshold', None) - self.validation_metric_type = kwargs.get('validation_metric_type', None) + self.box_detections_per_image = kwargs.get("box_detections_per_image", None) + self.box_score_threshold = kwargs.get("box_score_threshold", None) + self.image_size = kwargs.get("image_size", None) + self.max_size = kwargs.get("max_size", None) + self.min_size = kwargs.get("min_size", None) + self.model_size = kwargs.get("model_size", None) + self.multi_scale = kwargs.get("multi_scale", None) + self.nms_iou_threshold = kwargs.get("nms_iou_threshold", None) + self.tile_grid_size = kwargs.get("tile_grid_size", None) + self.tile_overlap_ratio = kwargs.get("tile_overlap_ratio", None) + self.tile_predictions_nms_threshold = kwargs.get("tile_predictions_nms_threshold", None) + self.validation_iou_threshold = kwargs.get("validation_iou_threshold", None) + self.validation_metric_type = kwargs.get("validation_metric_type", None) class ImageModelSettings(msrest.serialization.Model): """Settings used for training the model. 
-For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. 
- :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. 
Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. 
- :vartype weight_decay: float - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 
'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Possible values include: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. 
Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. 
+ :vartype weight_decay: float + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": 
"float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__(self, **kwargs): """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -18132,191 +17166,188 @@ def __init__( :paramtype weight_decay: float """ super(ImageModelSettings, self).__init__(**kwargs) - self.advanced_settings = kwargs.get('advanced_settings', None) - self.ams_gradient = kwargs.get('ams_gradient', None) - self.augmentations = kwargs.get('augmentations', None) - self.beta1 = kwargs.get('beta1', None) - self.beta2 = kwargs.get('beta2', None) - self.checkpoint_frequency = kwargs.get('checkpoint_frequency', None) - self.checkpoint_model = kwargs.get('checkpoint_model', None) - self.checkpoint_run_id = kwargs.get('checkpoint_run_id', None) - self.distributed = kwargs.get('distributed', None) - self.early_stopping = kwargs.get('early_stopping', None) - self.early_stopping_delay = kwargs.get('early_stopping_delay', None) - self.early_stopping_patience = kwargs.get('early_stopping_patience', None) - self.enable_onnx_normalization = kwargs.get('enable_onnx_normalization', None) - self.evaluation_frequency = kwargs.get('evaluation_frequency', None) - self.gradient_accumulation_step = kwargs.get('gradient_accumulation_step', None) - self.layers_to_freeze = kwargs.get('layers_to_freeze', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.momentum = kwargs.get('momentum', None) - self.nesterov = kwargs.get('nesterov', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.number_of_workers = kwargs.get('number_of_workers', None) - self.optimizer = kwargs.get('optimizer', None) - self.random_seed = kwargs.get('random_seed', None) - self.step_lr_gamma = kwargs.get('step_lr_gamma', 
None) - self.step_lr_step_size = kwargs.get('step_lr_step_size', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_cosine_lr_cycles = kwargs.get('warmup_cosine_lr_cycles', None) - self.warmup_cosine_lr_warmup_epochs = kwargs.get('warmup_cosine_lr_warmup_epochs', None) - self.weight_decay = kwargs.get('weight_decay', None) + self.advanced_settings = kwargs.get("advanced_settings", None) + self.ams_gradient = kwargs.get("ams_gradient", None) + self.augmentations = kwargs.get("augmentations", None) + self.beta1 = kwargs.get("beta1", None) + self.beta2 = kwargs.get("beta2", None) + self.checkpoint_frequency = kwargs.get("checkpoint_frequency", None) + self.checkpoint_model = kwargs.get("checkpoint_model", None) + self.checkpoint_run_id = kwargs.get("checkpoint_run_id", None) + self.distributed = kwargs.get("distributed", None) + self.early_stopping = kwargs.get("early_stopping", None) + self.early_stopping_delay = kwargs.get("early_stopping_delay", None) + self.early_stopping_patience = kwargs.get("early_stopping_patience", None) + self.enable_onnx_normalization = kwargs.get("enable_onnx_normalization", None) + self.evaluation_frequency = kwargs.get("evaluation_frequency", None) + self.gradient_accumulation_step = kwargs.get("gradient_accumulation_step", None) + self.layers_to_freeze = kwargs.get("layers_to_freeze", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.learning_rate_scheduler = kwargs.get("learning_rate_scheduler", None) + self.model_name = kwargs.get("model_name", None) + self.momentum = kwargs.get("momentum", None) + self.nesterov = kwargs.get("nesterov", None) + self.number_of_epochs = kwargs.get("number_of_epochs", None) + self.number_of_workers = kwargs.get("number_of_workers", None) + self.optimizer = kwargs.get("optimizer", None) + self.random_seed = kwargs.get("random_seed", None) + self.step_lr_gamma = 
kwargs.get("step_lr_gamma", None) + self.step_lr_step_size = kwargs.get("step_lr_step_size", None) + self.training_batch_size = kwargs.get("training_batch_size", None) + self.validation_batch_size = kwargs.get("validation_batch_size", None) + self.warmup_cosine_lr_cycles = kwargs.get("warmup_cosine_lr_cycles", None) + self.warmup_cosine_lr_warmup_epochs = kwargs.get("warmup_cosine_lr_warmup_epochs", None) + self.weight_decay = kwargs.get("weight_decay", None) class ImageModelSettingsClassification(ImageModelSettings): """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. 
- :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. 
- For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. 
- :vartype weight_decay: float - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. - :vartype training_crop_size: int - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: int - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: int - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :vartype weighted_loss: int - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 
'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'int'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'int'}, - 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'int'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. 
+ :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. 
For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Possible values include: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. 
+ :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. + :vartype training_crop_size: int + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: int + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: int + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. 
+ :vartype weighted_loss: int + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": 
"float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "training_crop_size": {"key": "trainingCropSize", "type": "int"}, + "validation_crop_size": {"key": "validationCropSize", "type": "int"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "int"}, + "weighted_loss": {"key": "weightedLoss", "type": "int"}, + } + + def __init__(self, **kwargs): """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -18424,222 +17455,219 @@ def __init__( :paramtype weighted_loss: int """ super(ImageModelSettingsClassification, self).__init__(**kwargs) - self.training_crop_size = kwargs.get('training_crop_size', None) - self.validation_crop_size = kwargs.get('validation_crop_size', None) - self.validation_resize_size = kwargs.get('validation_resize_size', None) - self.weighted_loss = kwargs.get('weighted_loss', None) + self.training_crop_size = kwargs.get("training_crop_size", None) + self.validation_crop_size = kwargs.get("validation_crop_size", None) + self.validation_resize_size = kwargs.get("validation_resize_size", None) + self.weighted_loss = kwargs.get("weighted_loss", None) + +class ImageModelSettingsObjectDetection(ImageModelSettings): + """Settings used for training the model. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. 
+ :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. 
+ For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Possible values include: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. 
+ :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: int + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :vartype box_score_threshold: float + :ivar image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :vartype image_size: int + :ivar log_training_metrics: Enable computing and logging training metrics. Possible values + include: "Enable", "Disable". + :vartype log_training_metrics: str or + ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics + :ivar log_validation_loss: Enable computing and logging validation loss. Possible values + include: "Enable", "Disable". 
+ :vartype log_validation_loss: str or + ~azure.mgmt.machinelearningservices.models.LogValidationLoss + :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype max_size: int + :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype min_size: int + :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: + "None", "Small", "Medium", "Large", "ExtraLarge". + :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if no sufficient GPU memory. + Note: This settings is only supported for the 'yolov5' algorithm. + :vartype multi_scale: bool + :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a + float in the range [0, 1]. + :vartype nms_iou_threshold: float + :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not + be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype tile_grid_size: str + :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float + in the range [0, 1). + Note: This settings is not supported for the 'yolov5' algorithm. 
+ :vartype tile_overlap_ratio: float + :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/ inference. Must be float in the range [0, 1]. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype tile_predictions_nms_threshold: float + :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be + float in the range [0, 1]. + :vartype validation_iou_threshold: float + :ivar validation_metric_type: Metric computation method to use for validation metrics. Possible + values include: "None", "Coco", "Voc", "CocoVoc". + :vartype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ -class ImageModelSettingsObjectDetection(ImageModelSettings): - """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. 
- :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. 
- :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. 
- :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: float - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: int - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: float - :ivar image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype image_size: int - :ivar log_training_metrics: Enable computing and logging training metrics. Possible values - include: "Enable", "Disable". - :vartype log_training_metrics: str or - ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics - :ivar log_validation_loss: Enable computing and logging validation loss. Possible values - include: "Enable", "Disable". - :vartype log_validation_loss: str or - ~azure.mgmt.machinelearningservices.models.LogValidationLoss - :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype max_size: int - :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. 
- Note: This settings is not supported for the 'yolov5' algorithm. - :vartype min_size: int - :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: - "None", "Small", "Medium", "Large", "ExtraLarge". - :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize - :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype multi_scale: bool - :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a - float in the range [0, 1]. - :vartype nms_iou_threshold: float - :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not - be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_grid_size: str - :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float - in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_overlap_ratio: float - :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_predictions_nms_threshold: float - :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be - float in the range [0, 1]. - :vartype validation_iou_threshold: float - :ivar validation_metric_type: Metric computation method to use for validation metrics. 
Possible - values include: "None", "Coco", "Voc", "CocoVoc". - :vartype validation_metric_type: str or - ~azure.mgmt.machinelearningservices.models.ValidationMetricType - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 
'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'int'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'float'}, - 'image_size': {'key': 'imageSize', 'type': 'int'}, - 'log_training_metrics': {'key': 'logTrainingMetrics', 'type': 'str'}, - 'log_validation_loss': {'key': 'logValidationLoss', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'int'}, - 'min_size': {'key': 'minSize', 'type': 'int'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'bool'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'float'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'float'}, - 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'float'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'float'}, - 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + 
"early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "int"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "float"}, + "image_size": {"key": "imageSize", "type": "int"}, + "log_training_metrics": {"key": "logTrainingMetrics", "type": "str"}, + "log_validation_loss": {"key": "logValidationLoss", "type": "str"}, + "max_size": {"key": "maxSize", "type": "int"}, + "min_size": {"key": "minSize", "type": "int"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": 
{"key": "multiScale", "type": "bool"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "float"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "float"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "float"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "float"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -18795,90 +17823,87 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ValidationMetricType """ super(ImageModelSettingsObjectDetection, self).__init__(**kwargs) - self.box_detections_per_image = kwargs.get('box_detections_per_image', None) - self.box_score_threshold = kwargs.get('box_score_threshold', None) - self.image_size = kwargs.get('image_size', None) - self.log_training_metrics = kwargs.get('log_training_metrics', None) - self.log_validation_loss = kwargs.get('log_validation_loss', None) - self.max_size = kwargs.get('max_size', None) - self.min_size = kwargs.get('min_size', None) - self.model_size = kwargs.get('model_size', None) - self.multi_scale = kwargs.get('multi_scale', None) - self.nms_iou_threshold = kwargs.get('nms_iou_threshold', None) - self.tile_grid_size = kwargs.get('tile_grid_size', None) - self.tile_overlap_ratio = kwargs.get('tile_overlap_ratio', None) - self.tile_predictions_nms_threshold = kwargs.get('tile_predictions_nms_threshold', None) - self.validation_iou_threshold = kwargs.get('validation_iou_threshold', None) - self.validation_metric_type = kwargs.get('validation_metric_type', None) + self.box_detections_per_image = kwargs.get("box_detections_per_image", None) + self.box_score_threshold = kwargs.get("box_score_threshold", None) + self.image_size = kwargs.get("image_size", None) + self.log_training_metrics 
= kwargs.get("log_training_metrics", None) + self.log_validation_loss = kwargs.get("log_validation_loss", None) + self.max_size = kwargs.get("max_size", None) + self.min_size = kwargs.get("min_size", None) + self.model_size = kwargs.get("model_size", None) + self.multi_scale = kwargs.get("multi_scale", None) + self.nms_iou_threshold = kwargs.get("nms_iou_threshold", None) + self.tile_grid_size = kwargs.get("tile_grid_size", None) + self.tile_overlap_ratio = kwargs.get("tile_overlap_ratio", None) + self.tile_predictions_nms_threshold = kwargs.get("tile_predictions_nms_threshold", None) + self.validation_iou_threshold = kwargs.get("validation_iou_threshold", None) + self.validation_metric_type = kwargs.get("validation_metric_type", None) class ImageObjectDetection(AutoMLVertical, ImageObjectDetectionBase): """Image Object Detection. Object detection is used to identify objects in an image and locate each object with a -bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. 
- :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. 
+ :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Possible values include: + "MeanAveragePrecision". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "limit_settings": {"required": True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def 
__init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings @@ -18912,17 +17937,17 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics """ super(ImageObjectDetection, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageObjectDetection' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.limit_settings = kwargs["limit_settings"] + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) + self.model_settings = kwargs.get("model_settings", None) + self.search_space = kwargs.get("search_space", None) + self.task_type = "ImageObjectDetection" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ImageSweepSettings(msrest.serialization.Model): @@ -18939,18 +17964,15 @@ class ImageSweepSettings(msrest.serialization.Model): """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 
'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword early_termination: Type of early termination policy. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy @@ -18960,8 +17982,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ super(ImageSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] + self.early_termination = kwargs.get("early_termination", None) + self.sampling_algorithm = kwargs["sampling_algorithm"] class ImportDataAction(ScheduleActionBase): @@ -18978,27 +18000,24 @@ class ImportDataAction(ScheduleActionBase): """ _validation = { - 'action_type': {'required': True}, - 'data_import_definition': {'required': True}, + "action_type": {"required": True}, + "data_import_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'data_import_definition': {'key': 'dataImportDefinition', 'type': 'DataImport'}, + "action_type": {"key": "actionType", "type": "str"}, + "data_import_definition": {"key": "dataImportDefinition", "type": "DataImport"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword data_import_definition: Required. [Required] Defines Schedule action definition details. 
:paramtype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport """ super(ImportDataAction, self).__init__(**kwargs) - self.action_type = 'ImportData' # type: str - self.data_import_definition = kwargs['data_import_definition'] + self.action_type = "ImportData" # type: str + self.data_import_definition = kwargs["data_import_definition"] class IndexColumn(msrest.serialization.Model): @@ -19012,14 +18031,11 @@ class IndexColumn(msrest.serialization.Model): """ _attribute_map = { - 'column_name': {'key': 'columnName', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, + "column_name": {"key": "columnName", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword column_name: Specifies the column name. :paramtype column_name: str @@ -19028,8 +18044,8 @@ def __init__( :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType """ super(IndexColumn, self).__init__(**kwargs) - self.column_name = kwargs.get('column_name', None) - self.data_type = kwargs.get('data_type', None) + self.column_name = kwargs.get("column_name", None) + self.data_type = kwargs.get("data_type", None) class InferenceContainerProperties(msrest.serialization.Model): @@ -19045,15 +18061,12 @@ class InferenceContainerProperties(msrest.serialization.Model): """ _attribute_map = { - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword liveness_route: The route to check the liveness of the inference server container. 
:paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route @@ -19064,9 +18077,9 @@ def __init__( :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route """ super(InferenceContainerProperties, self).__init__(**kwargs) - self.liveness_route = kwargs.get('liveness_route', None) - self.readiness_route = kwargs.get('readiness_route', None) - self.scoring_route = kwargs.get('scoring_route', None) + self.liveness_route = kwargs.get("liveness_route", None) + self.readiness_route = kwargs.get("readiness_route", None) + self.scoring_route = kwargs.get("scoring_route", None) class InferenceEndpoint(TrackedResource): @@ -19103,31 +18116,28 @@ class InferenceEndpoint(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'InferenceEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": 
"ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "InferenceEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -19144,10 +18154,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(InferenceEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class PropertiesBase(msrest.serialization.Model): @@ -19160,14 +18170,11 @@ class PropertiesBase(msrest.serialization.Model): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description of the resource. 
:paramtype description: str @@ -19175,8 +18182,8 @@ def __init__( :paramtype properties: dict[str, str] """ super(PropertiesBase, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) + self.description = kwargs.get("description", None) + self.properties = kwargs.get("properties", None) class InferenceEndpointProperties(PropertiesBase): @@ -19205,25 +18212,22 @@ class InferenceEndpointProperties(PropertiesBase): """ _validation = { - 'auth_mode': {'required': True}, - 'endpoint_uri': {'readonly': True}, - 'group_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'provisioning_state': {'readonly': True}, + "auth_mode": {"required": True}, + "endpoint_uri": {"readonly": True}, + "group_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "auth_mode": {"key": "authMode", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "group_id": {"key": "groupId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description of the resource. 
:paramtype description: str @@ -19237,9 +18241,9 @@ def __init__( :paramtype group_id: str """ super(InferenceEndpointProperties, self).__init__(**kwargs) - self.auth_mode = kwargs['auth_mode'] + self.auth_mode = kwargs["auth_mode"] self.endpoint_uri = None - self.group_id = kwargs['group_id'] + self.group_id = kwargs["group_id"] self.provisioning_state = None @@ -19254,14 +18258,11 @@ class InferenceEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Mo """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[InferenceEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferenceEndpoint]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of InferenceEndpoint objects. If null, there are no additional pages. @@ -19270,8 +18271,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.InferenceEndpoint] """ super(InferenceEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class InferenceGroup(TrackedResource): @@ -19308,31 +18309,28 @@ class InferenceGroup(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 
'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'InferenceGroupProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "InferenceGroupProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -19349,10 +18347,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(InferenceGroup, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class InferenceGroupProperties(PropertiesBase): @@ -19379,22 +18377,19 @@ class InferenceGroupProperties(PropertiesBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'bonus_extra_capacity': {'key': 'bonusExtraCapacity', 'type': 'int'}, - 'metadata': {'key': 'metadata', 'type': 'str'}, - 'priority': {'key': 
'priority', 'type': 'int'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "bonus_extra_capacity": {"key": "bonusExtraCapacity", "type": "int"}, + "metadata": {"key": "metadata", "type": "str"}, + "priority": {"key": "priority", "type": "int"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description of the resource. :paramtype description: str @@ -19410,9 +18405,9 @@ def __init__( :paramtype priority: int """ super(InferenceGroupProperties, self).__init__(**kwargs) - self.bonus_extra_capacity = kwargs.get('bonus_extra_capacity', 0) - self.metadata = kwargs.get('metadata', None) - self.priority = kwargs.get('priority', 0) + self.bonus_extra_capacity = kwargs.get("bonus_extra_capacity", 0) + self.metadata = kwargs.get("metadata", None) + self.priority = kwargs.get("priority", 0) self.provisioning_state = None @@ -19427,14 +18422,11 @@ class InferenceGroupTrackedResourceArmPaginatedResult(msrest.serialization.Model """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[InferenceGroup]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferenceGroup]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of InferenceGroup objects. If null, there are no additional pages. 
@@ -19443,8 +18435,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.InferenceGroup] """ super(InferenceGroupTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class InferencePool(TrackedResource): @@ -19481,31 +18473,28 @@ class InferencePool(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'InferencePoolProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "InferencePoolProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): 
""" :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -19522,10 +18511,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(InferencePool, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class InferencePoolProperties(PropertiesBase): @@ -19557,25 +18546,22 @@ class InferencePoolProperties(PropertiesBase): """ _validation = { - 'node_sku_type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'provisioning_state': {'readonly': True}, + "node_sku_type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'environment_configuration': {'key': 'environmentConfiguration', 'type': 'PoolEnvironmentConfiguration'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'PoolModelConfiguration'}, - 'node_sku_type': {'key': 'nodeSkuType', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'request_configuration': {'key': 'requestConfiguration', 'type': 'RequestConfiguration'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "environment_configuration": {"key": "environmentConfiguration", "type": "PoolEnvironmentConfiguration"}, + "model_configuration": {"key": "modelConfiguration", "type": "PoolModelConfiguration"}, + 
"node_sku_type": {"key": "nodeSkuType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "request_configuration": {"key": "requestConfiguration", "type": "RequestConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description of the resource. :paramtype description: str @@ -19596,12 +18582,12 @@ def __init__( ~azure.mgmt.machinelearningservices.models.RequestConfiguration """ super(InferencePoolProperties, self).__init__(**kwargs) - self.code_configuration = kwargs.get('code_configuration', None) - self.environment_configuration = kwargs.get('environment_configuration', None) - self.model_configuration = kwargs.get('model_configuration', None) - self.node_sku_type = kwargs['node_sku_type'] + self.code_configuration = kwargs.get("code_configuration", None) + self.environment_configuration = kwargs.get("environment_configuration", None) + self.model_configuration = kwargs.get("model_configuration", None) + self.node_sku_type = kwargs["node_sku_type"] self.provisioning_state = None - self.request_configuration = kwargs.get('request_configuration', None) + self.request_configuration = kwargs.get("request_configuration", None) class InferencePoolTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -19615,14 +18601,11 @@ class InferencePoolTrackedResourceArmPaginatedResult(msrest.serialization.Model) """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[InferencePool]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferencePool]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of InferencePool objects. If null, there are no additional pages. 
@@ -19631,8 +18614,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.InferencePool] """ super(InferencePoolTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class InstanceTypeSchema(msrest.serialization.Model): @@ -19645,14 +18628,11 @@ class InstanceTypeSchema(msrest.serialization.Model): """ _attribute_map = { - 'node_selector': {'key': 'nodeSelector', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'}, + "node_selector": {"key": "nodeSelector", "type": "{str}"}, + "resources": {"key": "resources", "type": "InstanceTypeSchemaResources"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword node_selector: Node Selector. :paramtype node_selector: dict[str, str] @@ -19660,8 +18640,8 @@ def __init__( :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources """ super(InstanceTypeSchema, self).__init__(**kwargs) - self.node_selector = kwargs.get('node_selector', None) - self.resources = kwargs.get('resources', None) + self.node_selector = kwargs.get("node_selector", None) + self.resources = kwargs.get("resources", None) class InstanceTypeSchemaResources(msrest.serialization.Model): @@ -19674,14 +18654,11 @@ class InstanceTypeSchemaResources(msrest.serialization.Model): """ _attribute_map = { - 'requests': {'key': 'requests', 'type': '{str}'}, - 'limits': {'key': 'limits', 'type': '{str}'}, + "requests": {"key": "requests", "type": "{str}"}, + "limits": {"key": "limits", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword requests: Resource requests for this instance type. 
:paramtype requests: dict[str, str] @@ -19689,8 +18666,8 @@ def __init__( :paramtype limits: dict[str, str] """ super(InstanceTypeSchemaResources, self).__init__(**kwargs) - self.requests = kwargs.get('requests', None) - self.limits = kwargs.get('limits', None) + self.requests = kwargs.get("requests", None) + self.limits = kwargs.get("limits", None) class IntellectualProperty(msrest.serialization.Model): @@ -19707,18 +18684,15 @@ class IntellectualProperty(msrest.serialization.Model): """ _validation = { - 'publisher': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "publisher": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'protection_level': {'key': 'protectionLevel', 'type': 'str'}, - 'publisher': {'key': 'publisher', 'type': 'str'}, + "protection_level": {"key": "protectionLevel", "type": "str"}, + "publisher": {"key": "publisher", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword protection_level: Protection level of the Intellectual Property. Possible values include: "All", "None". 
@@ -19728,8 +18702,8 @@ def __init__( :paramtype publisher: str """ super(IntellectualProperty, self).__init__(**kwargs) - self.protection_level = kwargs.get('protection_level', None) - self.publisher = kwargs['publisher'] + self.protection_level = kwargs.get("protection_level", None) + self.publisher = kwargs["publisher"] class JobBase(ProxyResource): @@ -19755,31 +18729,28 @@ class JobBase(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'JobBaseProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "JobBaseProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ super(JobBase, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): @@ -19793,14 +18764,11 @@ class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[JobBase]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[JobBase]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of JobBase objects. If null, there are no additional pages. @@ -19809,8 +18777,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] """ super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class JobResourceConfiguration(ResourceConfiguration): @@ -19839,23 +18807,20 @@ class JobResourceConfiguration(ResourceConfiguration): """ _validation = { - 'shm_size': {'pattern': r'\d+[bBkKmMgG]'}, + "shm_size": {"pattern": r"\d+[bBkKmMgG]"}, } _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, - 'docker_args': {'key': 'dockerArgs', 'type': 'str'}, - 'shm_size': {'key': 'shmSize', 'type': 'str'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + 
"max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, + "docker_args": {"key": "dockerArgs", "type": "str"}, + "shm_size": {"key": "shmSize", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword instance_count: Optional number of instances or nodes used by the compute target. :paramtype instance_count: int @@ -19879,8 +18844,8 @@ def __init__( :paramtype shm_size: str """ super(JobResourceConfiguration, self).__init__(**kwargs) - self.docker_args = kwargs.get('docker_args', None) - self.shm_size = kwargs.get('shm_size', "2g") + self.docker_args = kwargs.get("docker_args", None) + self.shm_size = kwargs.get("shm_size", "2g") class JobScheduleAction(ScheduleActionBase): @@ -19897,26 +18862,23 @@ class JobScheduleAction(ScheduleActionBase): """ _validation = { - 'action_type': {'required': True}, - 'job_definition': {'required': True}, + "action_type": {"required": True}, + "job_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'job_definition': {'key': 'jobDefinition', 'type': 'JobBaseProperties'}, + "action_type": {"key": "actionType", "type": "str"}, + "job_definition": {"key": "jobDefinition", "type": "JobBaseProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword job_definition: Required. [Required] Defines Schedule action definition details. 
:paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ super(JobScheduleAction, self).__init__(**kwargs) - self.action_type = 'CreateJob' # type: str - self.job_definition = kwargs['job_definition'] + self.action_type = "CreateJob" # type: str + self.job_definition = kwargs["job_definition"] class JobService(msrest.serialization.Model): @@ -19942,24 +18904,21 @@ class JobService(msrest.serialization.Model): """ _validation = { - 'error_message': {'readonly': True}, - 'status': {'readonly': True}, + "error_message": {"readonly": True}, + "status": {"readonly": True}, } _attribute_map = { - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'job_service_type': {'key': 'jobServiceType', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': 'Nodes'}, - 'port': {'key': 'port', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'status': {'key': 'status', 'type': 'str'}, + "endpoint": {"key": "endpoint", "type": "str"}, + "error_message": {"key": "errorMessage", "type": "str"}, + "job_service_type": {"key": "jobServiceType", "type": "str"}, + "nodes": {"key": "nodes", "type": "Nodes"}, + "port": {"key": "port", "type": "int"}, + "properties": {"key": "properties", "type": "{str}"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword endpoint: Url for endpoint. 
:paramtype endpoint: str @@ -19974,12 +18933,12 @@ def __init__( :paramtype properties: dict[str, str] """ super(JobService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) + self.endpoint = kwargs.get("endpoint", None) self.error_message = None - self.job_service_type = kwargs.get('job_service_type', None) - self.nodes = kwargs.get('nodes', None) - self.port = kwargs.get('port', None) - self.properties = kwargs.get('properties', None) + self.job_service_type = kwargs.get("job_service_type", None) + self.nodes = kwargs.get("nodes", None) + self.port = kwargs.get("port", None) + self.properties = kwargs.get("properties", None) self.status = None @@ -19995,15 +18954,12 @@ class JupyterKernelConfig(msrest.serialization.Model): """ _attribute_map = { - 'argv': {'key': 'argv', 'type': '[str]'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'language': {'key': 'language', 'type': 'str'}, + "argv": {"key": "argv", "type": "[str]"}, + "display_name": {"key": "displayName", "type": "str"}, + "language": {"key": "language", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword argv: Argument to the the runtime. 
:paramtype argv: list[str] @@ -20013,9 +18969,9 @@ def __init__( :paramtype language: str """ super(JupyterKernelConfig, self).__init__(**kwargs) - self.argv = kwargs.get('argv', None) - self.display_name = kwargs.get('display_name', None) - self.language = kwargs.get('language', None) + self.argv = kwargs.get("argv", None) + self.display_name = kwargs.get("display_name", None) + self.language = kwargs.get("language", None) class KerberosCredentials(msrest.serialization.Model): @@ -20033,21 +18989,18 @@ class KerberosCredentials(msrest.serialization.Model): """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. 
:paramtype kerberos_kdc_address: str @@ -20058,9 +19011,9 @@ def __init__( :paramtype kerberos_realm: str """ super(KerberosCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] + self.kerberos_kdc_address = kwargs["kerberos_kdc_address"] + self.kerberos_principal = kwargs["kerberos_principal"] + self.kerberos_realm = kwargs["kerberos_realm"] class KerberosKeytabCredentials(DatastoreCredentials, KerberosCredentials): @@ -20084,25 +19037,22 @@ class KerberosKeytabCredentials(DatastoreCredentials, KerberosCredentials): """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosKeytabSecrets'}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + 
"secrets": {"key": "secrets", "type": "KerberosKeytabSecrets"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. :paramtype kerberos_kdc_address: str @@ -20115,11 +19065,11 @@ def __init__( :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets """ super(KerberosKeytabCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] - self.credentials_type = 'KerberosKeytab' # type: str - self.secrets = kwargs['secrets'] + self.kerberos_kdc_address = kwargs["kerberos_kdc_address"] + self.kerberos_principal = kwargs["kerberos_principal"] + self.kerberos_realm = kwargs["kerberos_realm"] + self.credentials_type = "KerberosKeytab" # type: str + self.secrets = kwargs["secrets"] class KerberosKeytabSecrets(DatastoreSecrets): @@ -20136,25 +19086,22 @@ class KerberosKeytabSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_keytab': {'key': 'kerberosKeytab', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_keytab": {"key": "kerberosKeytab", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword kerberos_keytab: Kerberos keytab secret. 
:paramtype kerberos_keytab: str """ super(KerberosKeytabSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosKeytab' # type: str - self.kerberos_keytab = kwargs.get('kerberos_keytab', None) + self.secrets_type = "KerberosKeytab" # type: str + self.kerberos_keytab = kwargs.get("kerberos_keytab", None) class KerberosPasswordCredentials(DatastoreCredentials, KerberosCredentials): @@ -20178,25 +19125,22 @@ class KerberosPasswordCredentials(DatastoreCredentials, KerberosCredentials): """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosPasswordSecrets'}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosPasswordSecrets"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword kerberos_kdc_address: 
Required. [Required] IP Address or DNS HostName. :paramtype kerberos_kdc_address: str @@ -20209,11 +19153,11 @@ def __init__( :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets """ super(KerberosPasswordCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] - self.credentials_type = 'KerberosPassword' # type: str - self.secrets = kwargs['secrets'] + self.kerberos_kdc_address = kwargs["kerberos_kdc_address"] + self.kerberos_principal = kwargs["kerberos_principal"] + self.kerberos_realm = kwargs["kerberos_realm"] + self.credentials_type = "KerberosPassword" # type: str + self.secrets = kwargs["secrets"] class KerberosPasswordSecrets(DatastoreSecrets): @@ -20230,25 +19174,22 @@ class KerberosPasswordSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_password': {'key': 'kerberosPassword', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_password": {"key": "kerberosPassword", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword kerberos_password: Kerberos password secret. 
:paramtype kerberos_password: str """ super(KerberosPasswordSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosPassword' # type: str - self.kerberos_password = kwargs.get('kerberos_password', None) + self.secrets_type = "KerberosPassword" # type: str + self.kerberos_password = kwargs.get("kerberos_password", None) class KeyVaultProperties(msrest.serialization.Model): @@ -20266,20 +19207,17 @@ class KeyVaultProperties(msrest.serialization.Model): """ _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'key_vault_arm_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "key_vault_arm_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + "identity_client_id": {"key": "identityClientId", "type": "str"}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + "key_vault_arm_id": {"key": "keyVaultArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword identity_client_id: Currently, we support only SystemAssigned MSI. We need this when we support UserAssignedIdentities. 
@@ -20290,9 +19228,9 @@ def __init__( :paramtype key_vault_arm_id: str """ super(KeyVaultProperties, self).__init__(**kwargs) - self.identity_client_id = kwargs.get('identity_client_id', None) - self.key_identifier = kwargs['key_identifier'] - self.key_vault_arm_id = kwargs['key_vault_arm_id'] + self.identity_client_id = kwargs.get("identity_client_id", None) + self.key_identifier = kwargs["key_identifier"] + self.key_vault_arm_id = kwargs["key_vault_arm_id"] class KubernetesSchema(msrest.serialization.Model): @@ -20303,19 +19241,16 @@ class KubernetesSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, + "properties": {"key": "properties", "type": "KubernetesProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of Kubernetes. :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties """ super(KubernetesSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class Kubernetes(Compute, KubernetesSchema): @@ -20357,32 +19292,29 @@ class Kubernetes(Compute, KubernetesSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 
'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "KubernetesProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Properties of Kubernetes. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties @@ -20397,17 +19329,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(Kubernetes, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'Kubernetes' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "Kubernetes" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): @@ -20469,38 +19401,35 @@ class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': 
{'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, } _subtype_map = { - 'endpoint_compute_type': {'Kubernetes': 'KubernetesOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'} + "endpoint_compute_type": {"Kubernetes": "KubernetesOnlineDeployment", "Managed": 
"ManagedOnlineDeployment"} } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -20542,18 +19471,18 @@ def __init__( :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ super(OnlineDeploymentProperties, self).__init__(**kwargs) - self.app_insights_enabled = kwargs.get('app_insights_enabled', False) - self.data_collector = kwargs.get('data_collector', None) - self.egress_public_network_access = kwargs.get('egress_public_network_access', None) - self.endpoint_compute_type = 'OnlineDeploymentProperties' # type: str - self.instance_type = kwargs.get('instance_type', None) - self.liveness_probe = kwargs.get('liveness_probe', None) - self.model = kwargs.get('model', None) - self.model_mount_path = kwargs.get('model_mount_path', None) + self.app_insights_enabled = kwargs.get("app_insights_enabled", False) + self.data_collector = kwargs.get("data_collector", None) + self.egress_public_network_access = kwargs.get("egress_public_network_access", None) + self.endpoint_compute_type = "OnlineDeploymentProperties" # type: str + self.instance_type = kwargs.get("instance_type", None) + self.liveness_probe = kwargs.get("liveness_probe", None) + self.model = kwargs.get("model", None) + self.model_mount_path = kwargs.get("model_mount_path", None) self.provisioning_state = None - self.readiness_probe = kwargs.get('readiness_probe', None) - self.request_settings = kwargs.get('request_settings', None) - self.scale_settings = kwargs.get('scale_settings', None) + self.readiness_probe = kwargs.get("readiness_probe", None) + self.request_settings = kwargs.get("request_settings", None) + self.scale_settings = kwargs.get("scale_settings", None) class KubernetesOnlineDeployment(OnlineDeploymentProperties): @@ -20616,36 +19545,35 @@ class 
KubernetesOnlineDeployment(OnlineDeploymentProperties): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, - 'container_resource_requirements': {'key': 'containerResourceRequirements', - 'type': 'ContainerResourceRequirements'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + 
"data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + "container_resource_requirements": { + "key": "containerResourceRequirements", + "type": "ContainerResourceRequirements", + }, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for the endpoint deployment. 
:paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -20691,8 +19619,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements """ super(KubernetesOnlineDeployment, self).__init__(**kwargs) - self.endpoint_compute_type = 'Kubernetes' # type: str - self.container_resource_requirements = kwargs.get('container_resource_requirements', None) + self.endpoint_compute_type = "Kubernetes" # type: str + self.container_resource_requirements = kwargs.get("container_resource_requirements", None) class KubernetesProperties(msrest.serialization.Model): @@ -20718,20 +19646,17 @@ class KubernetesProperties(msrest.serialization.Model): """ _attribute_map = { - 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'}, - 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'}, - 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'}, - 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'}, - 'vc_name': {'key': 'vcName', 'type': 'str'}, - 'namespace': {'key': 'namespace', 'type': 'str'}, - 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'}, - 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'}, + "relay_connection_string": {"key": "relayConnectionString", "type": "str"}, + "service_bus_connection_string": {"key": "serviceBusConnectionString", "type": "str"}, + "extension_principal_id": {"key": "extensionPrincipalId", "type": "str"}, + "extension_instance_release_train": {"key": "extensionInstanceReleaseTrain", "type": "str"}, + "vc_name": {"key": "vcName", "type": "str"}, + "namespace": {"key": "namespace", "type": "str"}, + "default_instance_type": {"key": "defaultInstanceType", "type": "str"}, + "instance_types": {"key": "instanceTypes", "type": "{InstanceTypeSchema}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword 
relay_connection_string: Relay connection string. :paramtype relay_connection_string: str @@ -20752,14 +19677,14 @@ def __init__( ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] """ super(KubernetesProperties, self).__init__(**kwargs) - self.relay_connection_string = kwargs.get('relay_connection_string', None) - self.service_bus_connection_string = kwargs.get('service_bus_connection_string', None) - self.extension_principal_id = kwargs.get('extension_principal_id', None) - self.extension_instance_release_train = kwargs.get('extension_instance_release_train', None) - self.vc_name = kwargs.get('vc_name', None) - self.namespace = kwargs.get('namespace', "default") - self.default_instance_type = kwargs.get('default_instance_type', None) - self.instance_types = kwargs.get('instance_types', None) + self.relay_connection_string = kwargs.get("relay_connection_string", None) + self.service_bus_connection_string = kwargs.get("service_bus_connection_string", None) + self.extension_principal_id = kwargs.get("extension_principal_id", None) + self.extension_instance_release_train = kwargs.get("extension_instance_release_train", None) + self.vc_name = kwargs.get("vc_name", None) + self.namespace = kwargs.get("namespace", "default") + self.default_instance_type = kwargs.get("default_instance_type", None) + self.instance_types = kwargs.get("instance_types", None) class LabelCategory(msrest.serialization.Model): @@ -20775,15 +19700,12 @@ class LabelCategory(msrest.serialization.Model): """ _attribute_map = { - 'classes': {'key': 'classes', 'type': '{LabelClass}'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'multi_select': {'key': 'multiSelect', 'type': 'str'}, + "classes": {"key": "classes", "type": "{LabelClass}"}, + "display_name": {"key": "displayName", "type": "str"}, + "multi_select": {"key": "multiSelect", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword classes: Dictionary of label classes in 
this category. :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] @@ -20794,9 +19716,9 @@ def __init__( :paramtype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect """ super(LabelCategory, self).__init__(**kwargs) - self.classes = kwargs.get('classes', None) - self.display_name = kwargs.get('display_name', None) - self.multi_select = kwargs.get('multi_select', None) + self.classes = kwargs.get("classes", None) + self.display_name = kwargs.get("display_name", None) + self.multi_select = kwargs.get("multi_select", None) class LabelClass(msrest.serialization.Model): @@ -20809,14 +19731,11 @@ class LabelClass(msrest.serialization.Model): """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'}, + "display_name": {"key": "displayName", "type": "str"}, + "subclasses": {"key": "subclasses", "type": "{LabelClass}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword display_name: Display name of the label class. 
:paramtype display_name: str @@ -20824,8 +19743,8 @@ def __init__( :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] """ super(LabelClass, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.subclasses = kwargs.get('subclasses', None) + self.display_name = kwargs.get("display_name", None) + self.subclasses = kwargs.get("subclasses", None) class LabelingDataConfiguration(msrest.serialization.Model): @@ -20840,14 +19759,11 @@ class LabelingDataConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'incremental_data_refresh': {'key': 'incrementalDataRefresh', 'type': 'str'}, + "data_id": {"key": "dataId", "type": "str"}, + "incremental_data_refresh": {"key": "incrementalDataRefresh", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword data_id: Resource Id of the data asset to perform labeling. :paramtype data_id: str @@ -20857,8 +19773,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh """ super(LabelingDataConfiguration, self).__init__(**kwargs) - self.data_id = kwargs.get('data_id', None) - self.incremental_data_refresh = kwargs.get('incremental_data_refresh', None) + self.data_id = kwargs.get("data_id", None) + self.incremental_data_refresh = kwargs.get("incremental_data_refresh", None) class LabelingJob(ProxyResource): @@ -20884,31 +19800,28 @@ class LabelingJob(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 
'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'LabelingJobProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "LabelingJobProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties """ super(LabelingJob, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class LabelingJobMediaProperties(msrest.serialization.Model): @@ -20925,23 +19838,17 @@ class LabelingJobMediaProperties(msrest.serialization.Model): """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, } - _subtype_map = { - 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} - } + _subtype_map = {"media_type": {"Image": "LabelingJobImageProperties", "Text": "LabelingJobTextProperties"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(LabelingJobMediaProperties, self).__init__(**kwargs) self.media_type = None # type: Optional[str] @@ -20960,18 +19867,15 @@ class LabelingJobImageProperties(LabelingJobMediaProperties): """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": 
"str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword annotation_type: Annotation type of image labeling job. Possible values include: "Classification", "BoundingBox", "InstanceSegmentation". @@ -20979,8 +19883,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ImageAnnotationType """ super(LabelingJobImageProperties, self).__init__(**kwargs) - self.media_type = 'Image' # type: str - self.annotation_type = kwargs.get('annotation_type', None) + self.media_type = "Image" # type: str + self.annotation_type = kwargs.get("annotation_type", None) class LabelingJobInstructions(msrest.serialization.Model): @@ -20991,19 +19895,16 @@ class LabelingJobInstructions(msrest.serialization.Model): """ _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword uri: The link to a page with detailed labeling instructions for labelers. 
:paramtype uri: str """ super(LabelingJobInstructions, self).__init__(**kwargs) - self.uri = kwargs.get('uri', None) + self.uri = kwargs.get("uri", None) class LabelingJobProperties(JobBaseProperties): @@ -21078,46 +19979,43 @@ class LabelingJobProperties(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'progress_metrics': {'readonly': True}, - 'project_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'status_messages': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'data_configuration': {'key': 'dataConfiguration', 'type': 'LabelingDataConfiguration'}, - 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, - 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, - 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, - 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, - 'progress_metrics': 
{'key': 'progressMetrics', 'type': 'ProgressMetrics'}, - 'project_id': {'key': 'projectId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'}, - } - - def __init__( - self, - **kwargs - ): + "job_type": {"required": True}, + "status": {"readonly": True}, + "created_date_time": {"readonly": True}, + "progress_metrics": {"readonly": True}, + "project_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "status_messages": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "data_configuration": {"key": "dataConfiguration", "type": "LabelingDataConfiguration"}, + "job_instructions": {"key": "jobInstructions", "type": "LabelingJobInstructions"}, + "label_categories": {"key": "labelCategories", "type": "{LabelCategory}"}, + "labeling_job_media_properties": {"key": "labelingJobMediaProperties", "type": "LabelingJobMediaProperties"}, + "ml_assist_configuration": {"key": "mlAssistConfiguration", "type": "MLAssistConfiguration"}, + "progress_metrics": 
{"key": "progressMetrics", "type": "ProgressMetrics"}, + "project_id": {"key": "projectId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "status_messages": {"key": "statusMessages", "type": "[StatusMessage]"}, + } + + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -21164,13 +20062,13 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration """ super(LabelingJobProperties, self).__init__(**kwargs) - self.job_type = 'Labeling' # type: str + self.job_type = "Labeling" # type: str self.created_date_time = None - self.data_configuration = kwargs.get('data_configuration', None) - self.job_instructions = kwargs.get('job_instructions', None) - self.label_categories = kwargs.get('label_categories', None) - self.labeling_job_media_properties = kwargs.get('labeling_job_media_properties', None) - self.ml_assist_configuration = kwargs.get('ml_assist_configuration', None) + self.data_configuration = kwargs.get("data_configuration", None) + self.job_instructions = kwargs.get("job_instructions", None) + self.label_categories = kwargs.get("label_categories", None) + self.labeling_job_media_properties = kwargs.get("labeling_job_media_properties", None) + self.ml_assist_configuration = kwargs.get("ml_assist_configuration", None) self.progress_metrics = None self.project_id = None self.provisioning_state = None @@ -21188,14 +20086,11 @@ class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[LabelingJob]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[LabelingJob]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no additional pages. 
@@ -21204,8 +20099,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] """ super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class LabelingJobTextProperties(LabelingJobMediaProperties): @@ -21222,18 +20117,15 @@ class LabelingJobTextProperties(LabelingJobMediaProperties): """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword annotation_type: Annotation type of text labeling job. Possible values include: "Classification", "NamedEntityRecognition". 
@@ -21241,8 +20133,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.TextAnnotationType """ super(LabelingJobTextProperties, self).__init__(**kwargs) - self.media_type = 'Text' # type: str - self.annotation_type = kwargs.get('annotation_type', None) + self.media_type = "Text" # type: str + self.annotation_type = kwargs.get("annotation_type", None) class OneLakeArtifact(msrest.serialization.Model): @@ -21261,29 +20153,24 @@ class OneLakeArtifact(msrest.serialization.Model): """ _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, } _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, } - _subtype_map = { - 'artifact_type': {'LakeHouse': 'LakeHouseArtifact'} - } + _subtype_map = {"artifact_type": {"LakeHouse": "LakeHouseArtifact"}} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword artifact_name: Required. [Required] OneLake artifact name. 
:paramtype artifact_name: str """ super(OneLakeArtifact, self).__init__(**kwargs) - self.artifact_name = kwargs['artifact_name'] + self.artifact_name = kwargs["artifact_name"] self.artifact_type = None # type: Optional[str] @@ -21300,25 +20187,22 @@ class LakeHouseArtifact(OneLakeArtifact): """ _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, } _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword artifact_name: Required. [Required] OneLake artifact name. :paramtype artifact_name: str """ super(LakeHouseArtifact, self).__init__(**kwargs) - self.artifact_type = 'LakeHouse' # type: str + self.artifact_type = "LakeHouse" # type: str class ListAmlUserFeatureResult(msrest.serialization.Model): @@ -21334,21 +20218,17 @@ class ListAmlUserFeatureResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[AmlUserFeature]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ListAmlUserFeatureResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -21366,21 +20246,17 @@ class ListNotebookKeysResult(msrest.serialization.Model): """ _validation = { - 'primary_access_key': 
{'readonly': True}, - 'secondary_access_key': {'readonly': True}, + "primary_access_key": {"readonly": True}, + "secondary_access_key": {"readonly": True}, } _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + "primary_access_key": {"key": "primaryAccessKey", "type": "str"}, + "secondary_access_key": {"key": "secondaryAccessKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ListNotebookKeysResult, self).__init__(**kwargs) self.primary_access_key = None self.secondary_access_key = None @@ -21396,19 +20272,15 @@ class ListStorageAccountKeysResult(msrest.serialization.Model): """ _validation = { - 'user_storage_key': {'readonly': True}, + "user_storage_key": {"readonly": True}, } _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + "user_storage_key": {"key": "userStorageKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ListStorageAccountKeysResult, self).__init__(**kwargs) self.user_storage_key = None @@ -21426,21 +20298,17 @@ class ListUsagesResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Usage]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ListUsagesResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -21466,24 +20334,23 @@ class ListWorkspaceKeysResult(msrest.serialization.Model): """ _validation = { - 'app_insights_instrumentation_key': {'readonly': True}, - 
'user_storage_arm_id': {'readonly': True}, - 'user_storage_key': {'readonly': True}, + "app_insights_instrumentation_key": {"readonly": True}, + "user_storage_arm_id": {"readonly": True}, + "user_storage_key": {"readonly": True}, } _attribute_map = { - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', - 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, - 'user_storage_arm_id': {'key': 'userStorageArmId', 'type': 'str'}, - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, + "container_registry_credentials": { + "key": "containerRegistryCredentials", + "type": "RegistryListCredentialsResult", + }, + "notebook_access_keys": {"key": "notebookAccessKeys", "type": "ListNotebookKeysResult"}, + "user_storage_arm_id": {"key": "userStorageArmId", "type": "str"}, + "user_storage_key": {"key": "userStorageKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword container_registry_credentials: :paramtype container_registry_credentials: @@ -21494,8 +20361,8 @@ def __init__( """ super(ListWorkspaceKeysResult, self).__init__(**kwargs) self.app_insights_instrumentation_key = None - self.container_registry_credentials = kwargs.get('container_registry_credentials', None) - self.notebook_access_keys = kwargs.get('notebook_access_keys', None) + self.container_registry_credentials = kwargs.get("container_registry_credentials", None) + self.notebook_access_keys = kwargs.get("notebook_access_keys", None) self.user_storage_arm_id = None self.user_storage_key = None @@ -21513,21 +20380,17 @@ class ListWorkspaceQuotas(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + 
"value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ResourceQuota]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ListWorkspaceQuotas, self).__init__(**kwargs) self.value = None self.next_link = None @@ -21549,20 +20412,17 @@ class LiteralJobInput(JobInput): """ _validation = { - 'job_input_type': {'required': True}, - 'value': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "job_input_type": {"required": True}, + "value": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description for the input. 
:paramtype description: str @@ -21570,8 +20430,8 @@ def __init__( :paramtype value: str """ super(LiteralJobInput, self).__init__(**kwargs) - self.job_input_type = 'literal' # type: str - self.value = kwargs['value'] + self.job_input_type = "literal" # type: str + self.value = kwargs["value"] class ManagedComputeIdentity(MonitorComputeIdentityBase): @@ -21588,25 +20448,22 @@ class ManagedComputeIdentity(MonitorComputeIdentityBase): """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity """ super(ManagedComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'ManagedIdentity' # type: str - self.identity = kwargs.get('identity', None) + self.compute_identity_type = "ManagedIdentity" # type: str + self.identity = kwargs.get("identity", None) class ManagedIdentity(IdentityConfiguration): @@ -21630,20 +20487,17 @@ class ManagedIdentity(IdentityConfiguration): """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "object_id": {"key": "objectId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not set this field. 
@@ -21656,10 +20510,10 @@ def __init__( :paramtype resource_id: str """ super(ManagedIdentity, self).__init__(**kwargs) - self.identity_type = 'Managed' # type: str - self.client_id = kwargs.get('client_id', None) - self.object_id = kwargs.get('object_id', None) - self.resource_id = kwargs.get('resource_id', None) + self.identity_type = "Managed" # type: str + self.client_id = kwargs.get("client_id", None) + self.object_id = kwargs.get("object_id", None) + self.resource_id = kwargs.get("resource_id", None) class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -21712,28 +20566,25 @@ class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPr """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionManagedIdentity'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": 
"object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionManagedIdentity"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -21768,8 +20619,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity """ super(ManagedIdentityAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ManagedIdentity' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "ManagedIdentity" # type: str + self.credentials = kwargs.get("credentials", None) class ManagedIdentityCredential(DataReferenceCredential): @@ -21799,22 +20650,19 @@ class ManagedIdentityCredential(DataReferenceCredential): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'managed_identity_type': {'key': 'managedIdentityType', 'type': 'str'}, - 'user_managed_identity_client_id': {'key': 'userManagedIdentityClientId', 'type': 'str'}, - 'user_managed_identity_principal_id': {'key': 'userManagedIdentityPrincipalId', 'type': 'str'}, - 'user_managed_identity_resource_id': {'key': 'userManagedIdentityResourceId', 'type': 'str'}, - 'user_managed_identity_tenant_id': {'key': 'userManagedIdentityTenantId', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, + "managed_identity_type": {"key": "managedIdentityType", "type": "str"}, + "user_managed_identity_client_id": {"key": "userManagedIdentityClientId", "type": "str"}, + "user_managed_identity_principal_id": {"key": "userManagedIdentityPrincipalId", "type": "str"}, + 
"user_managed_identity_resource_id": {"key": "userManagedIdentityResourceId", "type": "str"}, + "user_managed_identity_tenant_id": {"key": "userManagedIdentityTenantId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword managed_identity_type: ManagedIdentityCredential identity type. :paramtype managed_identity_type: str @@ -21832,12 +20680,12 @@ def __init__( :paramtype user_managed_identity_tenant_id: str """ super(ManagedIdentityCredential, self).__init__(**kwargs) - self.credential_type = 'ManagedIdentity' # type: str - self.managed_identity_type = kwargs.get('managed_identity_type', None) - self.user_managed_identity_client_id = kwargs.get('user_managed_identity_client_id', None) - self.user_managed_identity_principal_id = kwargs.get('user_managed_identity_principal_id', None) - self.user_managed_identity_resource_id = kwargs.get('user_managed_identity_resource_id', None) - self.user_managed_identity_tenant_id = kwargs.get('user_managed_identity_tenant_id', None) + self.credential_type = "ManagedIdentity" # type: str + self.managed_identity_type = kwargs.get("managed_identity_type", None) + self.user_managed_identity_client_id = kwargs.get("user_managed_identity_client_id", None) + self.user_managed_identity_principal_id = kwargs.get("user_managed_identity_principal_id", None) + self.user_managed_identity_resource_id = kwargs.get("user_managed_identity_resource_id", None) + self.user_managed_identity_tenant_id = kwargs.get("user_managed_identity_tenant_id", None) class ManagedNetworkProvisionOptions(msrest.serialization.Model): @@ -21848,19 +20696,16 @@ class ManagedNetworkProvisionOptions(msrest.serialization.Model): """ _attribute_map = { - 'include_spark': {'key': 'includeSpark', 'type': 'bool'}, + "include_spark": {"key": "includeSpark", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword include_spark: :paramtype include_spark: bool """ 
super(ManagedNetworkProvisionOptions, self).__init__(**kwargs) - self.include_spark = kwargs.get('include_spark', None) + self.include_spark = kwargs.get("include_spark", None) class ManagedNetworkProvisionStatus(msrest.serialization.Model): @@ -21874,14 +20719,11 @@ class ManagedNetworkProvisionStatus(msrest.serialization.Model): """ _attribute_map = { - 'spark_ready': {'key': 'sparkReady', 'type': 'bool'}, - 'status': {'key': 'status', 'type': 'str'}, + "spark_ready": {"key": "sparkReady", "type": "bool"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword spark_ready: :paramtype spark_ready: bool @@ -21890,8 +20732,8 @@ def __init__( :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus """ super(ManagedNetworkProvisionStatus, self).__init__(**kwargs) - self.spark_ready = kwargs.get('spark_ready', None) - self.status = kwargs.get('status', None) + self.spark_ready = kwargs.get("spark_ready", None) + self.status = kwargs.get("status", None) class ManagedNetworkSettings(msrest.serialization.Model): @@ -21915,22 +20757,19 @@ class ManagedNetworkSettings(msrest.serialization.Model): """ _validation = { - 'network_id': {'readonly': True}, - 'changeable_isolation_modes': {'readonly': True}, + "network_id": {"readonly": True}, + "changeable_isolation_modes": {"readonly": True}, } _attribute_map = { - 'isolation_mode': {'key': 'isolationMode', 'type': 'str'}, - 'network_id': {'key': 'networkId', 'type': 'str'}, - 'outbound_rules': {'key': 'outboundRules', 'type': '{OutboundRule}'}, - 'status': {'key': 'status', 'type': 'ManagedNetworkProvisionStatus'}, - 'changeable_isolation_modes': {'key': 'changeableIsolationModes', 'type': '[str]'}, + "isolation_mode": {"key": "isolationMode", "type": "str"}, + "network_id": {"key": "networkId", "type": "str"}, + "outbound_rules": {"key": "outboundRules", "type": "{OutboundRule}"}, + "status": {"key": "status", 
"type": "ManagedNetworkProvisionStatus"}, + "changeable_isolation_modes": {"key": "changeableIsolationModes", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword isolation_mode: Isolation mode for the managed network of a machine learning workspace. Possible values include: "Disabled", "AllowInternetOutbound", @@ -21943,10 +20782,10 @@ def __init__( :paramtype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus """ super(ManagedNetworkSettings, self).__init__(**kwargs) - self.isolation_mode = kwargs.get('isolation_mode', None) + self.isolation_mode = kwargs.get("isolation_mode", None) self.network_id = None - self.outbound_rules = kwargs.get('outbound_rules', None) - self.status = kwargs.get('status', None) + self.outbound_rules = kwargs.get("outbound_rules", None) + self.status = kwargs.get("status", None) self.changeable_isolation_modes = None @@ -22006,34 +20845,31 @@ class ManagedOnlineDeployment(OnlineDeploymentProperties): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 
'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_configuration: Code configuration for the endpoint deployment. 
:paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -22075,7 +20911,7 @@ def __init__( :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ super(ManagedOnlineDeployment, self).__init__(**kwargs) - self.endpoint_compute_type = 'Managed' # type: str + self.endpoint_compute_type = "Managed" # type: str class ManagedOnlineEndpointDeploymentResourceProperties(EndpointDeploymentResourceProperties): @@ -22096,26 +20932,23 @@ class ManagedOnlineEndpointDeploymentResourceProperties(EndpointDeploymentResour """ _validation = { - 'provisioning_state': {'readonly': True}, - 'type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9._]'}, + "provisioning_state": {"readonly": True}, + "type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9._]"}, } _attribute_map = { - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword failure_reason: The failure reason if the creation failed. 
:paramtype failure_reason: str """ super(ManagedOnlineEndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.type = 'managedOnlineEndpoint' # type: str + self.type = "managedOnlineEndpoint" # type: str class ManagedOnlineEndpointResourceProperties(EndpointResourceProperties): @@ -22145,23 +20978,20 @@ class ManagedOnlineEndpointResourceProperties(EndpointResourceProperties): """ _validation = { - 'endpoint_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'associated_resource_id': {'key': 'associatedResourceId', 'type': 'str'}, - 'endpoint_type': {'key': 'endpointType', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "associated_resource_id": {"key": "associatedResourceId", "type": "str"}, + "endpoint_type": {"key": "endpointType", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword associated_resource_id: Byo resource id for creating the built-in model service endpoints. 
@@ -22174,7 +21004,7 @@ def __init__( :paramtype name: str """ super(ManagedOnlineEndpointResourceProperties, self).__init__(**kwargs) - self.endpoint_type = 'managedOnlineEndpoint' # type: str + self.endpoint_type = "managedOnlineEndpoint" # type: str class ManagedResourceGroupAssignedIdentities(msrest.serialization.Model): @@ -22185,19 +21015,16 @@ class ManagedResourceGroupAssignedIdentities(msrest.serialization.Model): """ _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword principal_id: Identity principal Id. :paramtype principal_id: str """ super(ManagedResourceGroupAssignedIdentities, self).__init__(**kwargs) - self.principal_id = kwargs.get('principal_id', None) + self.principal_id = kwargs.get("principal_id", None) class ManagedResourceGroupSettings(msrest.serialization.Model): @@ -22209,20 +21036,17 @@ class ManagedResourceGroupSettings(msrest.serialization.Model): """ _attribute_map = { - 'assigned_identities': {'key': 'assignedIdentities', 'type': '[ManagedResourceGroupAssignedIdentities]'}, + "assigned_identities": {"key": "assignedIdentities", "type": "[ManagedResourceGroupAssignedIdentities]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword assigned_identities: List of assigned identities for the managed resource group. 
:paramtype assigned_identities: list[~azure.mgmt.machinelearningservices.models.ManagedResourceGroupAssignedIdentities] """ super(ManagedResourceGroupSettings, self).__init__(**kwargs) - self.assigned_identities = kwargs.get('assigned_identities', None) + self.assigned_identities = kwargs.get("assigned_identities", None) class ManagedServiceIdentity(msrest.serialization.Model): @@ -22251,22 +21075,19 @@ class ManagedServiceIdentity(msrest.serialization.Model): """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword type: Required. Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). 
Possible values include: "None", "SystemAssigned", @@ -22282,8 +21103,8 @@ def __init__( super(ManagedServiceIdentity, self).__init__(**kwargs) self.principal_id = None self.tenant_id = None - self.type = kwargs['type'] - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + self.type = kwargs["type"] + self.user_assigned_identities = kwargs.get("user_assigned_identities", None) class MarketplacePlan(msrest.serialization.Model): @@ -22300,23 +21121,19 @@ class MarketplacePlan(msrest.serialization.Model): """ _validation = { - 'offer_id': {'readonly': True}, - 'plan_id': {'readonly': True}, - 'publisher_id': {'readonly': True}, + "offer_id": {"readonly": True}, + "plan_id": {"readonly": True}, + "publisher_id": {"readonly": True}, } _attribute_map = { - 'offer_id': {'key': 'offerId', 'type': 'str'}, - 'plan_id': {'key': 'planId', 'type': 'str'}, - 'publisher_id': {'key': 'publisherId', 'type': 'str'}, + "offer_id": {"key": "offerId", "type": "str"}, + "plan_id": {"key": "planId", "type": "str"}, + "publisher_id": {"key": "publisherId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MarketplacePlan, self).__init__(**kwargs) self.offer_id = None self.plan_id = None @@ -22347,32 +21164,29 @@ class MarketplaceSubscription(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'MarketplaceSubscriptionProperties'}, + "id": {"key": 
"id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "MarketplaceSubscriptionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.MarketplaceSubscriptionProperties """ super(MarketplaceSubscription, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class MarketplaceSubscriptionProperties(msrest.serialization.Model): @@ -22398,23 +21212,20 @@ class MarketplaceSubscriptionProperties(msrest.serialization.Model): """ _validation = { - 'marketplace_plan': {'readonly': True}, - 'marketplace_subscription_status': {'readonly': True}, - 'model_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'provisioning_state': {'readonly': True}, + "marketplace_plan": {"readonly": True}, + "marketplace_subscription_status": {"readonly": True}, + "model_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'marketplace_plan': {'key': 'marketplacePlan', 'type': 'MarketplacePlan'}, - 'marketplace_subscription_status': {'key': 'marketplaceSubscriptionStatus', 'type': 'str'}, - 'model_id': {'key': 'modelId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "marketplace_plan": {"key": "marketplacePlan", "type": "MarketplacePlan"}, + "marketplace_subscription_status": {"key": "marketplaceSubscriptionStatus", "type": "str"}, + "model_id": {"key": "modelId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model_id: Required. 
[Required] Target Marketplace Model ID to create a Marketplace Subscription for. @@ -22423,7 +21234,7 @@ def __init__( super(MarketplaceSubscriptionProperties, self).__init__(**kwargs) self.marketplace_plan = None self.marketplace_subscription_status = None - self.model_id = kwargs['model_id'] + self.model_id = kwargs["model_id"] self.provisioning_state = None @@ -22438,14 +21249,11 @@ class MarketplaceSubscriptionResourceArmPaginatedResult(msrest.serialization.Mod """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[MarketplaceSubscription]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[MarketplaceSubscription]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of MarketplaceSubscription objects. If null, there are no additional pages. @@ -22454,8 +21262,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.MarketplaceSubscription] """ super(MarketplaceSubscriptionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class MaterializationComputeResource(msrest.serialization.Model): @@ -22466,19 +21274,16 @@ class MaterializationComputeResource(msrest.serialization.Model): """ _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, + "instance_type": {"key": "instanceType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword instance_type: Specifies the instance type. 
:paramtype instance_type: str """ super(MaterializationComputeResource, self).__init__(**kwargs) - self.instance_type = kwargs.get('instance_type', None) + self.instance_type = kwargs.get("instance_type", None) class MaterializationSettings(msrest.serialization.Model): @@ -22498,17 +21303,14 @@ class MaterializationSettings(msrest.serialization.Model): """ _attribute_map = { - 'notification': {'key': 'notification', 'type': 'NotificationSetting'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceTrigger'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'store_type': {'key': 'storeType', 'type': 'str'}, + "notification": {"key": "notification", "type": "NotificationSetting"}, + "resource": {"key": "resource", "type": "MaterializationComputeResource"}, + "schedule": {"key": "schedule", "type": "RecurrenceTrigger"}, + "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"}, + "store_type": {"key": "storeType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification: Specifies the notification details. 
:paramtype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -22524,11 +21326,11 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MaterializationStoreType """ super(MaterializationSettings, self).__init__(**kwargs) - self.notification = kwargs.get('notification', None) - self.resource = kwargs.get('resource', None) - self.schedule = kwargs.get('schedule', None) - self.spark_configuration = kwargs.get('spark_configuration', None) - self.store_type = kwargs.get('store_type', None) + self.notification = kwargs.get("notification", None) + self.resource = kwargs.get("resource", None) + self.schedule = kwargs.get("schedule", None) + self.spark_configuration = kwargs.get("spark_configuration", None) + self.store_type = kwargs.get("store_type", None) class MedianStoppingPolicy(EarlyTerminationPolicy): @@ -22547,19 +21349,16 @@ class MedianStoppingPolicy(EarlyTerminationPolicy): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. 
:paramtype delay_evaluation: int @@ -22567,7 +21366,7 @@ def __init__( :paramtype evaluation_interval: int """ super(MedianStoppingPolicy, self).__init__(**kwargs) - self.policy_type = 'MedianStopping' # type: str + self.policy_type = "MedianStopping" # type: str class MLAssistConfiguration(msrest.serialization.Model): @@ -22584,23 +21383,19 @@ class MLAssistConfiguration(msrest.serialization.Model): """ _validation = { - 'ml_assist': {'required': True}, + "ml_assist": {"required": True}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } _subtype_map = { - 'ml_assist': {'Disabled': 'MLAssistConfigurationDisabled', 'Enabled': 'MLAssistConfigurationEnabled'} + "ml_assist": {"Disabled": "MLAssistConfigurationDisabled", "Enabled": "MLAssistConfigurationEnabled"} } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MLAssistConfiguration, self).__init__(**kwargs) self.ml_assist = None # type: Optional[str] @@ -22616,21 +21411,17 @@ class MLAssistConfigurationDisabled(MLAssistConfiguration): """ _validation = { - 'ml_assist': {'required': True}, + "ml_assist": {"required": True}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MLAssistConfigurationDisabled, self).__init__(**kwargs) - self.ml_assist = 'Disabled' # type: str + self.ml_assist = "Disabled" # type: str class MLAssistConfigurationEnabled(MLAssistConfiguration): @@ -22649,21 +21440,18 @@ class MLAssistConfigurationEnabled(MLAssistConfiguration): """ _validation = { - 'ml_assist': {'required': True}, - 'inferencing_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'training_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "ml_assist": {"required": 
True}, + "inferencing_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "training_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, - 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'str'}, - 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, + "inferencing_compute_binding": {"key": "inferencingComputeBinding", "type": "str"}, + "training_compute_binding": {"key": "trainingComputeBinding", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword inferencing_compute_binding: Required. [Required] AML compute binding used in inferencing. @@ -22672,9 +21460,9 @@ def __init__( :paramtype training_compute_binding: str """ super(MLAssistConfigurationEnabled, self).__init__(**kwargs) - self.ml_assist = 'Enabled' # type: str - self.inferencing_compute_binding = kwargs['inferencing_compute_binding'] - self.training_compute_binding = kwargs['training_compute_binding'] + self.ml_assist = "Enabled" # type: str + self.inferencing_compute_binding = kwargs["inferencing_compute_binding"] + self.training_compute_binding = kwargs["training_compute_binding"] class MLFlowModelJobInput(JobInput, AssetJobInput): @@ -22698,22 +21486,19 @@ class MLFlowModelJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 
'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". @@ -22726,11 +21511,11 @@ def __init__( :paramtype description: str """ super(MLFlowModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'mlflow_model' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "mlflow_model" # type: str + self.description = kwargs.get("description", None) class MLFlowModelJobOutput(JobOutput, AssetJobOutput): @@ -22760,24 +21545,21 @@ class MLFlowModelJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + 
"mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. :paramtype asset_name: str @@ -22796,14 +21578,14 @@ def __init__( :paramtype description: str """ super(MLFlowModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'mlflow_model' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "mlflow_model" # type: str + self.description = kwargs.get("description", None) class MLTableData(DataVersionBaseProperties): @@ -22841,28 +21623,25 @@ class MLTableData(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'referenced_uris': {'key': 'referencedUris', 'type': '[str]'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "referenced_uris": {"key": "referencedUris", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -22891,8 +21670,8 @@ def __init__( :paramtype referenced_uris: list[str] """ super(MLTableData, self).__init__(**kwargs) - self.data_type = 'mltable' # type: str - self.referenced_uris = kwargs.get('referenced_uris', None) + self.data_type = "mltable" # type: str + self.referenced_uris = kwargs.get("referenced_uris", None) class MLTableJobInput(JobInput, AssetJobInput): @@ -22916,22 +21695,19 @@ class MLTableJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". 
@@ -22944,11 +21720,11 @@ def __init__( :paramtype description: str """ super(MLTableJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'mltable' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "mltable" # type: str + self.description = kwargs.get("description", None) class MLTableJobOutput(JobOutput, AssetJobOutput): @@ -22978,24 +21754,21 @@ class MLTableJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -23014,14 +21787,14 @@ def __init__( :paramtype description: str """ super(MLTableJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'mltable' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "mltable" # type: str + self.description = kwargs.get("description", None) class ModelConfiguration(msrest.serialization.Model): @@ -23034,14 +21807,11 @@ class ModelConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input delivery mode for the model. Possible values include: "Copy", "Download". 
:paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode @@ -23049,8 +21819,8 @@ def __init__( :paramtype mount_path: str """ super(ModelConfiguration, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.mount_path = kwargs.get('mount_path', None) + self.mode = kwargs.get("mode", None) + self.mount_path = kwargs.get("mount_path", None) class ModelContainer(ProxyResource): @@ -23076,31 +21846,28 @@ class ModelContainer(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelContainerProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties """ super(ModelContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class ModelContainerProperties(AssetContainer): @@ -23127,25 +21894,22 @@ class ModelContainerProperties(AssetContainer): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -23171,14 +21935,11 @@ class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ModelContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelContainer]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no additional pages. @@ -23187,8 +21948,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] """ super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ModelDeprecationInfo(msrest.serialization.Model): @@ -23201,14 +21962,11 @@ class ModelDeprecationInfo(msrest.serialization.Model): """ _attribute_map = { - 'fine_tune': {'key': 'fineTune', 'type': 'str'}, - 'inference': {'key': 'inference', 'type': 'str'}, + "fine_tune": {"key": "fineTune", "type": "str"}, + "inference": {"key": "inference", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword fine_tune: The datetime of deprecation of the fineTune Model. 
:paramtype fine_tune: str @@ -23216,8 +21974,8 @@ def __init__( :paramtype inference: str """ super(ModelDeprecationInfo, self).__init__(**kwargs) - self.fine_tune = kwargs.get('fine_tune', None) - self.inference = kwargs.get('inference', None) + self.fine_tune = kwargs.get("fine_tune", None) + self.inference = kwargs.get("inference", None) class ModelPackageInput(msrest.serialization.Model): @@ -23237,21 +21995,18 @@ class ModelPackageInput(msrest.serialization.Model): """ _validation = { - 'input_type': {'required': True}, - 'path': {'required': True}, + "input_type": {"required": True}, + "path": {"required": True}, } _attribute_map = { - 'input_type': {'key': 'inputType', 'type': 'str'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'PackageInputPathBase'}, + "input_type": {"key": "inputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "path": {"key": "path", "type": "PackageInputPathBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword input_type: Required. [Required] Type of the input included in the target image. Possible values include: "UriFile", "UriFolder". 
@@ -23264,10 +22019,10 @@ def __init__( :paramtype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase """ super(ModelPackageInput, self).__init__(**kwargs) - self.input_type = kwargs['input_type'] - self.mode = kwargs.get('mode', None) - self.mount_path = kwargs.get('mount_path', None) - self.path = kwargs['path'] + self.input_type = kwargs["input_type"] + self.mode = kwargs.get("mode", None) + self.mount_path = kwargs.get("mount_path", None) + self.path = kwargs["path"] class ModelPerformanceSignal(MonitoringSignalBase): @@ -23301,26 +22056,23 @@ class ModelPerformanceSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 'MonitoringDataSegment'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'ModelPerformanceMetricThresholdBase'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "metric_threshold": {"key": "metricThreshold", "type": "ModelPerformanceMetricThresholdBase"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": 
"MonitoringInputDataBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. :paramtype notification_types: list[str or @@ -23342,11 +22094,11 @@ def __init__( :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ super(ModelPerformanceSignal, self).__init__(**kwargs) - self.signal_type = 'ModelPerformance' # type: str - self.data_segment = kwargs.get('data_segment', None) - self.metric_threshold = kwargs['metric_threshold'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] + self.signal_type = "ModelPerformance" # type: str + self.data_segment = kwargs.get("data_segment", None) + self.metric_threshold = kwargs["metric_threshold"] + self.production_data = kwargs["production_data"] + self.reference_data = kwargs["reference_data"] class ModelSettings(msrest.serialization.Model): @@ -23359,23 +22111,20 @@ class ModelSettings(msrest.serialization.Model): """ _validation = { - 'model_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "model_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'model_id': {'key': 'modelId', 'type': 'str'}, + "model_id": {"key": "modelId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model_id: Required. [Required]. 
:paramtype model_id: str """ super(ModelSettings, self).__init__(**kwargs) - self.model_id = kwargs['model_id'] + self.model_id = kwargs["model_id"] class ModelSku(msrest.serialization.Model): @@ -23394,17 +22143,14 @@ class ModelSku(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'usage_name': {'key': 'usageName', 'type': 'str'}, - 'deprecation_date': {'key': 'deprecationDate', 'type': 'iso-8601'}, - 'capacity': {'key': 'capacity', 'type': 'CapacityConfig'}, - 'rate_limits': {'key': 'rateLimits', 'type': '[CallRateLimit]'}, + "name": {"key": "name", "type": "str"}, + "usage_name": {"key": "usageName", "type": "str"}, + "deprecation_date": {"key": "deprecationDate", "type": "iso-8601"}, + "capacity": {"key": "capacity", "type": "CapacityConfig"}, + "rate_limits": {"key": "rateLimits", "type": "[CallRateLimit]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: The name of the model SKU. :paramtype name: str @@ -23418,11 +22164,11 @@ def __init__( :paramtype rate_limits: list[~azure.mgmt.machinelearningservices.models.CallRateLimit] """ super(ModelSku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.usage_name = kwargs.get('usage_name', None) - self.deprecation_date = kwargs.get('deprecation_date', None) - self.capacity = kwargs.get('capacity', None) - self.rate_limits = kwargs.get('rate_limits', None) + self.name = kwargs.get("name", None) + self.usage_name = kwargs.get("usage_name", None) + self.deprecation_date = kwargs.get("deprecation_date", None) + self.capacity = kwargs.get("capacity", None) + self.rate_limits = kwargs.get("rate_limits", None) class ModelVersion(ProxyResource): @@ -23448,31 +22194,28 @@ class ModelVersion(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + 
"name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelVersionProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties """ super(ModelVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class ModelVersionProperties(AssetBase): @@ -23514,29 +22257,26 @@ class ModelVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'job_name': {'key': 'jobName', 'type': 'str'}, - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'model_uri': {'key': 'modelUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 
'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "flavors": {"key": "flavors", "type": "{FlavorData}"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "job_name": {"key": "jobName", "type": "str"}, + "model_type": {"key": "modelType", "type": "str"}, + "model_uri": {"key": "modelUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -23568,13 +22308,13 @@ def __init__( :paramtype stage: str """ super(ModelVersionProperties, self).__init__(**kwargs) - self.flavors = kwargs.get('flavors', None) - self.intellectual_property = kwargs.get('intellectual_property', None) - self.job_name = kwargs.get('job_name', None) - self.model_type = kwargs.get('model_type', None) - self.model_uri = kwargs.get('model_uri', None) + self.flavors = kwargs.get("flavors", None) + self.intellectual_property = kwargs.get("intellectual_property", None) + self.job_name = kwargs.get("job_name", None) + self.model_type = kwargs.get("model_type", None) + self.model_uri = kwargs.get("model_uri", None) self.provisioning_state = None - self.stage = kwargs.get('stage', None) + self.stage = kwargs.get("stage", None) class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): @@ -23588,14 +22328,11 @@ class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': 
{'key': 'value', 'type': '[ModelVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelVersion]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no additional pages. @@ -23604,8 +22341,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] """ super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class MonitorComputeConfigurationBase(msrest.serialization.Model): @@ -23622,23 +22359,17 @@ class MonitorComputeConfigurationBase(msrest.serialization.Model): """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, } - _subtype_map = { - 'compute_type': {'ServerlessSpark': 'MonitorServerlessSparkCompute'} - } + _subtype_map = {"compute_type": {"ServerlessSpark": "MonitorServerlessSparkCompute"}} - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(MonitorComputeConfigurationBase, self).__init__(**kwargs) self.compute_type = None # type: Optional[str] @@ -23663,21 +22394,18 @@ class MonitorDefinition(msrest.serialization.Model): """ _validation = { - 'compute_configuration': {'required': True}, - 'signals': {'required': True}, + "compute_configuration": {"required": True}, + "signals": {"required": True}, } _attribute_map = { - 'alert_notification_settings': {'key': 'alertNotificationSettings', 'type': 'MonitorNotificationSettings'}, - 'compute_configuration': {'key': 'computeConfiguration', 'type': 'MonitorComputeConfigurationBase'}, - 
'monitoring_target': {'key': 'monitoringTarget', 'type': 'MonitoringTarget'}, - 'signals': {'key': 'signals', 'type': '{MonitoringSignalBase}'}, + "alert_notification_settings": {"key": "alertNotificationSettings", "type": "MonitorNotificationSettings"}, + "compute_configuration": {"key": "computeConfiguration", "type": "MonitorComputeConfigurationBase"}, + "monitoring_target": {"key": "monitoringTarget", "type": "MonitoringTarget"}, + "signals": {"key": "signals", "type": "{MonitoringSignalBase}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword alert_notification_settings: The monitor's notification settings. :paramtype alert_notification_settings: @@ -23693,10 +22421,10 @@ def __init__( :paramtype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] """ super(MonitorDefinition, self).__init__(**kwargs) - self.alert_notification_settings = kwargs.get('alert_notification_settings', None) - self.compute_configuration = kwargs['compute_configuration'] - self.monitoring_target = kwargs.get('monitoring_target', None) - self.signals = kwargs['signals'] + self.alert_notification_settings = kwargs.get("alert_notification_settings", None) + self.compute_configuration = kwargs["compute_configuration"] + self.monitoring_target = kwargs.get("monitoring_target", None) + self.signals = kwargs["signals"] class MonitorEmailNotificationSettings(msrest.serialization.Model): @@ -23708,20 +22436,17 @@ class MonitorEmailNotificationSettings(msrest.serialization.Model): """ _attribute_map = { - 'emails': {'key': 'emails', 'type': '[str]'}, + "emails": {"key": "emails", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword emails: This is the email recipient list which has a limitation of 499 characters in total. 
:paramtype emails: list[str] """ super(MonitorEmailNotificationSettings, self).__init__(**kwargs) - self.emails = kwargs.get('emails', None) + self.emails = kwargs.get("emails", None) class MonitoringDataSegment(msrest.serialization.Model): @@ -23734,14 +22459,11 @@ class MonitoringDataSegment(msrest.serialization.Model): """ _attribute_map = { - 'feature': {'key': 'feature', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, + "feature": {"key": "feature", "type": "str"}, + "values": {"key": "values", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword feature: The feature to segment the data on. :paramtype feature: str @@ -23749,8 +22471,8 @@ def __init__( :paramtype values: list[str] """ super(MonitoringDataSegment, self).__init__(**kwargs) - self.feature = kwargs.get('feature', None) - self.values = kwargs.get('values', None) + self.feature = kwargs.get("feature", None) + self.values = kwargs.get("values", None) class MonitoringTarget(msrest.serialization.Model): @@ -23768,19 +22490,16 @@ class MonitoringTarget(msrest.serialization.Model): """ _validation = { - 'task_type': {'required': True}, + "task_type": {"required": True}, } _attribute_map = { - 'deployment_id': {'key': 'deploymentId', 'type': 'str'}, - 'model_id': {'key': 'modelId', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, + "deployment_id": {"key": "deploymentId", "type": "str"}, + "model_id": {"key": "modelId", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword deployment_id: The ARM resource ID of either the deployment targeted by this monitor. 
:paramtype deployment_id: str @@ -23791,9 +22510,9 @@ def __init__( :paramtype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType """ super(MonitoringTarget, self).__init__(**kwargs) - self.deployment_id = kwargs.get('deployment_id', None) - self.model_id = kwargs.get('model_id', None) - self.task_type = kwargs['task_type'] + self.deployment_id = kwargs.get("deployment_id", None) + self.model_id = kwargs.get("model_id", None) + self.task_type = kwargs["task_type"] class MonitoringThreshold(msrest.serialization.Model): @@ -23804,19 +22523,16 @@ class MonitoringThreshold(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'float'}, + "value": {"key": "value", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: The threshold value. If null, the set default is dependent on the metric type. :paramtype value: float """ super(MonitoringThreshold, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class MonitoringWorkspaceConnection(msrest.serialization.Model): @@ -23833,14 +22549,11 @@ class MonitoringWorkspaceConnection(msrest.serialization.Model): """ _attribute_map = { - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'secrets': {'key': 'secrets', 'type': '{str}'}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "secrets": {"key": "secrets", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword environment_variables: The properties of a workspace service connection to store as environment variables in the submitted jobs. 
@@ -23852,8 +22565,8 @@ def __init__( :paramtype secrets: dict[str, str] """ super(MonitoringWorkspaceConnection, self).__init__(**kwargs) - self.environment_variables = kwargs.get('environment_variables', None) - self.secrets = kwargs.get('secrets', None) + self.environment_variables = kwargs.get("environment_variables", None) + self.secrets = kwargs.get("secrets", None) class MonitorNotificationSettings(msrest.serialization.Model): @@ -23865,20 +22578,17 @@ class MonitorNotificationSettings(msrest.serialization.Model): """ _attribute_map = { - 'email_notification_settings': {'key': 'emailNotificationSettings', 'type': 'MonitorEmailNotificationSettings'}, + "email_notification_settings": {"key": "emailNotificationSettings", "type": "MonitorEmailNotificationSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword email_notification_settings: The AML notification email settings. :paramtype email_notification_settings: ~azure.mgmt.machinelearningservices.models.MonitorEmailNotificationSettings """ super(MonitorNotificationSettings, self).__init__(**kwargs) - self.email_notification_settings = kwargs.get('email_notification_settings', None) + self.email_notification_settings = kwargs.get("email_notification_settings", None) class MonitorServerlessSparkCompute(MonitorComputeConfigurationBase): @@ -23900,23 +22610,20 @@ class MonitorServerlessSparkCompute(MonitorComputeConfigurationBase): """ _validation = { - 'compute_type': {'required': True}, - 'compute_identity': {'required': True}, - 'instance_type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'runtime_version': {'required': True, 'min_length': 1, 'pattern': r'^[0-9]+\.[0-9]+$'}, + "compute_type": {"required": True}, + "compute_identity": {"required": True}, + "instance_type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "runtime_version": {"required": True, "min_length": 1, "pattern": r"^[0-9]+\.[0-9]+$"}, } _attribute_map = { - 
'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_identity': {'key': 'computeIdentity', 'type': 'MonitorComputeIdentityBase'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_identity": {"key": "computeIdentity", "type": "MonitorComputeIdentityBase"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword compute_identity: Required. [Required] The identity scheme leveraged to by the spark jobs running on serverless Spark. @@ -23928,10 +22635,10 @@ def __init__( :paramtype runtime_version: str """ super(MonitorServerlessSparkCompute, self).__init__(**kwargs) - self.compute_type = 'ServerlessSpark' # type: str - self.compute_identity = kwargs['compute_identity'] - self.instance_type = kwargs['instance_type'] - self.runtime_version = kwargs['runtime_version'] + self.compute_type = "ServerlessSpark" # type: str + self.compute_identity = kwargs["compute_identity"] + self.instance_type = kwargs["instance_type"] + self.runtime_version = kwargs["runtime_version"] class Mpi(DistributionConfiguration): @@ -23948,25 +22655,22 @@ class Mpi(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword process_count_per_instance: Number of processes per MPI node. 
:paramtype process_count_per_instance: int """ super(Mpi, self).__init__(**kwargs) - self.distribution_type = 'Mpi' # type: str - self.process_count_per_instance = kwargs.get('process_count_per_instance', None) + self.distribution_type = "Mpi" # type: str + self.process_count_per_instance = kwargs.get("process_count_per_instance", None) class NlpFixedParameters(msrest.serialization.Model): @@ -23997,21 +22701,18 @@ class NlpFixedParameters(msrest.serialization.Model): """ _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'float'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_ratio": {"key": "warmupRatio", "type": "float"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before running a backward pass. 
@@ -24037,15 +22738,15 @@ def __init__( :paramtype weight_decay: float """ super(NlpFixedParameters, self).__init__(**kwargs) - self.gradient_accumulation_steps = kwargs.get('gradient_accumulation_steps', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_ratio = kwargs.get('warmup_ratio', None) - self.weight_decay = kwargs.get('weight_decay', None) + self.gradient_accumulation_steps = kwargs.get("gradient_accumulation_steps", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.learning_rate_scheduler = kwargs.get("learning_rate_scheduler", None) + self.model_name = kwargs.get("model_name", None) + self.number_of_epochs = kwargs.get("number_of_epochs", None) + self.training_batch_size = kwargs.get("training_batch_size", None) + self.validation_batch_size = kwargs.get("validation_batch_size", None) + self.warmup_ratio = kwargs.get("warmup_ratio", None) + self.weight_decay = kwargs.get("weight_decay", None) class NlpParameterSubspace(msrest.serialization.Model): @@ -24074,21 +22775,18 @@ class NlpParameterSubspace(msrest.serialization.Model): """ _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'str'}, - 
'weight_decay': {'key': 'weightDecay', 'type': 'str'}, + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_ratio": {"key": "warmupRatio", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before running a backward pass. @@ -24112,15 +22810,15 @@ def __init__( :paramtype weight_decay: str """ super(NlpParameterSubspace, self).__init__(**kwargs) - self.gradient_accumulation_steps = kwargs.get('gradient_accumulation_steps', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_ratio = kwargs.get('warmup_ratio', None) - self.weight_decay = kwargs.get('weight_decay', None) + self.gradient_accumulation_steps = kwargs.get("gradient_accumulation_steps", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.learning_rate_scheduler = kwargs.get("learning_rate_scheduler", None) + self.model_name = kwargs.get("model_name", None) + self.number_of_epochs = kwargs.get("number_of_epochs", None) + self.training_batch_size = kwargs.get("training_batch_size", None) + self.validation_batch_size = 
kwargs.get("validation_batch_size", None) + self.warmup_ratio = kwargs.get("warmup_ratio", None) + self.weight_decay = kwargs.get("weight_decay", None) class NlpSweepSettings(msrest.serialization.Model): @@ -24137,18 +22835,15 @@ class NlpSweepSettings(msrest.serialization.Model): """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword early_termination: Type of early termination policy for the sweeping job. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy @@ -24158,44 +22853,41 @@ def __init__( ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ super(NlpSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] + self.early_termination = kwargs.get("early_termination", None) + self.sampling_algorithm = kwargs["sampling_algorithm"] class NlpVertical(msrest.serialization.Model): """Abstract class for NLP related AutoML tasks. -NLP - Natural Language Processing. + NLP - Natural Language Processing. - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. 
- :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. 
+ :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword featurization_settings: Featurization inputs needed for AutoML job. 
:paramtype featurization_settings: @@ -24214,12 +22906,12 @@ def __init__( :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super(NlpVertical, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) class NlpVerticalFeaturizationSettings(FeaturizationSettings): @@ -24230,13 +22922,10 @@ class NlpVerticalFeaturizationSettings(FeaturizationSettings): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword dataset_language: Dataset language, useful for the text data. 
:paramtype dataset_language: str @@ -24260,17 +22949,14 @@ class NlpVerticalLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. :paramtype max_concurrent_trials: int @@ -24284,11 +22970,11 @@ def __init__( :paramtype trial_timeout: ~datetime.timedelta """ super(NlpVerticalLimitSettings, self).__init__(**kwargs) - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1) - self.max_nodes = kwargs.get('max_nodes', 1) - self.max_trials = kwargs.get('max_trials', 1) - self.timeout = kwargs.get('timeout', "P7D") - self.trial_timeout = kwargs.get('trial_timeout', None) + self.max_concurrent_trials = kwargs.get("max_concurrent_trials", 1) + self.max_nodes = kwargs.get("max_nodes", 1) + self.max_trials = kwargs.get("max_trials", 1) + self.timeout = kwargs.get("timeout", "P7D") + self.trial_timeout = kwargs.get("trial_timeout", None) class NodeStateCounts(msrest.serialization.Model): @@ -24311,29 +22997,25 @@ class NodeStateCounts(msrest.serialization.Model): """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': 
{'readonly': True}, + "idle_node_count": {"readonly": True}, + "running_node_count": {"readonly": True}, + "preparing_node_count": {"readonly": True}, + "unusable_node_count": {"readonly": True}, + "leaving_node_count": {"readonly": True}, + "preempted_node_count": {"readonly": True}, } _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + "idle_node_count": {"key": "idleNodeCount", "type": "int"}, + "running_node_count": {"key": "runningNodeCount", "type": "int"}, + "preparing_node_count": {"key": "preparingNodeCount", "type": "int"}, + "unusable_node_count": {"key": "unusableNodeCount", "type": "int"}, + "leaving_node_count": {"key": "leavingNodeCount", "type": "int"}, + "preempted_node_count": {"key": "preemptedNodeCount", "type": "int"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(NodeStateCounts, self).__init__(**kwargs) self.idle_node_count = None self.running_node_count = None @@ -24390,27 +23072,24 @@ class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2) """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 
'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -24442,7 +23121,7 @@ def __init__( :paramtype target: str """ super(NoneAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'None' # type: str + self.auth_type = "None" # type: str class NoneDatastoreCredentials(DatastoreCredentials): @@ -24457,21 +23136,17 @@ class NoneDatastoreCredentials(DatastoreCredentials): """ _validation = { - 'credentials_type': {'required': True}, + "credentials_type": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(NoneDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'None' # type: str + self.credentials_type = "None" # type: str class NotebookAccessTokenResult(msrest.serialization.Model): @@ -24498,33 +23173,29 @@ class 
NotebookAccessTokenResult(msrest.serialization.Model): """ _validation = { - 'access_token': {'readonly': True}, - 'expires_in': {'readonly': True}, - 'host_name': {'readonly': True}, - 'notebook_resource_id': {'readonly': True}, - 'public_dns': {'readonly': True}, - 'refresh_token': {'readonly': True}, - 'scope': {'readonly': True}, - 'token_type': {'readonly': True}, + "access_token": {"readonly": True}, + "expires_in": {"readonly": True}, + "host_name": {"readonly": True}, + "notebook_resource_id": {"readonly": True}, + "public_dns": {"readonly": True}, + "refresh_token": {"readonly": True}, + "scope": {"readonly": True}, + "token_type": {"readonly": True}, } _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expires_in': {'key': 'expiresIn', 'type': 'int'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, - 'public_dns': {'key': 'publicDns', 'type': 'str'}, - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'scope': {'key': 'scope', 'type': 'str'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, + "access_token": {"key": "accessToken", "type": "str"}, + "expires_in": {"key": "expiresIn", "type": "int"}, + "host_name": {"key": "hostName", "type": "str"}, + "notebook_resource_id": {"key": "notebookResourceId", "type": "str"}, + "public_dns": {"key": "publicDns", "type": "str"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "token_type": {"key": "tokenType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(NotebookAccessTokenResult, self).__init__(**kwargs) self.access_token = None self.expires_in = None @@ -24546,14 +23217,11 @@ class NotebookPreparationError(msrest.serialization.Model): """ _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, 
+ "error_message": {"key": "errorMessage", "type": "str"}, + "status_code": {"key": "statusCode", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword error_message: :paramtype error_message: str @@ -24561,8 +23229,8 @@ def __init__( :paramtype status_code: int """ super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = kwargs.get('error_message', None) - self.status_code = kwargs.get('status_code', None) + self.error_message = kwargs.get("error_message", None) + self.status_code = kwargs.get("status_code", None) class NotebookResourceInfo(msrest.serialization.Model): @@ -24580,16 +23248,13 @@ class NotebookResourceInfo(msrest.serialization.Model): """ _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'is_private_link_enabled': {'key': 'isPrivateLinkEnabled', 'type': 'bool'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "fqdn": {"key": "fqdn", "type": "str"}, + "is_private_link_enabled": {"key": "isPrivateLinkEnabled", "type": "bool"}, + "notebook_preparation_error": {"key": "notebookPreparationError", "type": "NotebookPreparationError"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword fqdn: :paramtype fqdn: str @@ -24602,10 +23267,10 @@ def __init__( :paramtype resource_id: str """ super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = kwargs.get('fqdn', None) - self.is_private_link_enabled = kwargs.get('is_private_link_enabled', None) - self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) - self.resource_id = kwargs.get('resource_id', None) + self.fqdn = kwargs.get("fqdn", None) + self.is_private_link_enabled = kwargs.get("is_private_link_enabled", None) + self.notebook_preparation_error = kwargs.get("notebook_preparation_error", 
None) + self.resource_id = kwargs.get("resource_id", None) class NotificationSetting(msrest.serialization.Model): @@ -24623,15 +23288,12 @@ class NotificationSetting(msrest.serialization.Model): """ _attribute_map = { - 'email_on': {'key': 'emailOn', 'type': '[str]'}, - 'emails': {'key': 'emails', 'type': '[str]'}, - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, + "email_on": {"key": "emailOn", "type": "[str]"}, + "emails": {"key": "emails", "type": "[str]"}, + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword email_on: Send email notification to user on specified notification type. :paramtype email_on: list[str or @@ -24644,9 +23306,9 @@ def __init__( :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ super(NotificationSetting, self).__init__(**kwargs) - self.email_on = kwargs.get('email_on', None) - self.emails = kwargs.get('emails', None) - self.webhooks = kwargs.get('webhooks', None) + self.email_on = kwargs.get("email_on", None) + self.emails = kwargs.get("emails", None) + self.webhooks = kwargs.get("webhooks", None) class NumericalDataDriftMetricThreshold(DataDriftMetricThresholdBase): @@ -24667,20 +23329,17 @@ class NumericalDataDriftMetricThreshold(DataDriftMetricThresholdBase): """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. 
If null, a default value will be set depending on the selected metric. @@ -24691,8 +23350,8 @@ def __init__( :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric """ super(NumericalDataDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Numerical" # type: str + self.metric = kwargs["metric"] class NumericalDataQualityMetricThreshold(DataQualityMetricThresholdBase): @@ -24712,20 +23371,17 @@ class NumericalDataQualityMetricThreshold(DataQualityMetricThresholdBase): """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -24735,8 +23391,8 @@ def __init__( :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric """ super(NumericalDataQualityMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Numerical" # type: str + self.metric = kwargs["metric"] class NumericalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): @@ -24758,20 +23414,17 @@ class NumericalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -24783,8 +23436,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric """ super(NumericalPredictionDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] + self.data_type = "Numerical" # type: str + self.metric = kwargs["metric"] class OAuth2AuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -24837,28 +23490,25 @@ class OAuth2AuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionOAuth2'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": 
"WorkspaceConnectionOAuth2"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -24893,8 +23543,8 @@ def __init__( :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionOAuth2 """ super(OAuth2AuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'OAuth2' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "OAuth2" # type: str + self.credentials = kwargs.get("credentials", None) class Objective(msrest.serialization.Model): @@ -24910,19 +23560,16 @@ class Objective(msrest.serialization.Model): """ _validation = { - 'goal': {'required': True}, - 'primary_metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "goal": {"required": True}, + "primary_metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'goal': {'key': 'goal', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "goal": {"key": "goal", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. Possible values include: "Minimize", "Maximize". 
@@ -24931,8 +23578,8 @@ def __init__( :paramtype primary_metric: str """ super(Objective, self).__init__(**kwargs) - self.goal = kwargs['goal'] - self.primary_metric = kwargs['primary_metric'] + self.goal = kwargs["goal"] + self.primary_metric = kwargs["primary_metric"] class OneLakeDatastore(DatastoreProperties): @@ -24973,31 +23620,28 @@ class OneLakeDatastore(DatastoreProperties): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'artifact': {'required': True}, - 'one_lake_workspace_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "artifact": {"required": True}, + "one_lake_workspace_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'artifact': {'key': 'artifact', 'type': 'OneLakeArtifact'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'one_lake_workspace_name': {'key': 'oneLakeWorkspaceName', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": 
"IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "artifact": {"key": "artifact", "type": "OneLakeArtifact"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "one_lake_workspace_name": {"key": "oneLakeWorkspaceName", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -25023,11 +23667,11 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ super(OneLakeDatastore, self).__init__(**kwargs) - self.datastore_type = 'OneLake' # type: str - self.artifact = kwargs['artifact'] - self.endpoint = kwargs.get('endpoint', None) - self.one_lake_workspace_name = kwargs['one_lake_workspace_name'] - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) + self.datastore_type = "OneLake" # type: str + self.artifact = kwargs["artifact"] + self.endpoint = kwargs.get("endpoint", None) + self.one_lake_workspace_name = kwargs["one_lake_workspace_name"] + self.service_data_access_auth_identity = kwargs.get("service_data_access_auth_identity", None) class OnlineDeployment(TrackedResource): @@ -25064,31 +23708,28 @@ class OnlineDeployment(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 
'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -25105,10 +23746,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(OnlineDeployment, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -25122,14 +23763,11 @@ class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Mod """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineDeployment]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineDeployment]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: 
The link to the next page of OnlineDeployment objects. If null, there are no additional pages. @@ -25138,8 +23776,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] """ super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class OnlineEndpoint(TrackedResource): @@ -25176,31 +23814,28 @@ class OnlineEndpoint(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineEndpointProperties"}, + "sku": 
{"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -25217,10 +23852,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(OnlineEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class OnlineEndpointProperties(EndpointPropertiesBase): @@ -25266,30 +23901,27 @@ class OnlineEndpointProperties(EndpointPropertiesBase): """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'mirror_traffic': {'key': 'mirrorTraffic', 'type': '{int}'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'publicNetworkAccess', 'type': 'str'}, - 'traffic': {'key': 'traffic', 'type': '{int}'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": 
"{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "compute": {"key": "compute", "type": "str"}, + "mirror_traffic": {"key": "mirrorTraffic", "type": "{int}"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, + "traffic": {"key": "traffic", "type": "{int}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' @@ -25318,11 +23950,11 @@ def __init__( :paramtype traffic: dict[str, int] """ super(OnlineEndpointProperties, self).__init__(**kwargs) - self.compute = kwargs.get('compute', None) - self.mirror_traffic = kwargs.get('mirror_traffic', None) + self.compute = kwargs.get("compute", None) + self.mirror_traffic = kwargs.get("mirror_traffic", None) self.provisioning_state = None - self.public_network_access = kwargs.get('public_network_access', None) - self.traffic = kwargs.get('traffic', None) + self.public_network_access = kwargs.get("public_network_access", None) + self.traffic = kwargs.get("traffic", None) class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -25336,14 +23968,11 @@ class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineEndpoint]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no additional pages. 
@@ -25352,8 +23981,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] """ super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class OnlineInferenceConfiguration(msrest.serialization.Model): @@ -25373,17 +24002,14 @@ class OnlineInferenceConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'configurations': {'key': 'configurations', 'type': '{str}'}, - 'entry_script': {'key': 'entryScript', 'type': 'str'}, - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + "configurations": {"key": "configurations", "type": "{str}"}, + "entry_script": {"key": "entryScript", "type": "str"}, + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword configurations: Additional configurations. 
:paramtype configurations: dict[str, str] @@ -25398,11 +24024,11 @@ def __init__( :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route """ super(OnlineInferenceConfiguration, self).__init__(**kwargs) - self.configurations = kwargs.get('configurations', None) - self.entry_script = kwargs.get('entry_script', None) - self.liveness_route = kwargs.get('liveness_route', None) - self.readiness_route = kwargs.get('readiness_route', None) - self.scoring_route = kwargs.get('scoring_route', None) + self.configurations = kwargs.get("configurations", None) + self.entry_script = kwargs.get("entry_script", None) + self.liveness_route = kwargs.get("liveness_route", None) + self.readiness_route = kwargs.get("readiness_route", None) + self.scoring_route = kwargs.get("scoring_route", None) class OnlineRequestSettings(msrest.serialization.Model): @@ -25421,15 +24047,12 @@ class OnlineRequestSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, - 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "max_queue_wait": {"key": "maxQueueWait", "type": "duration"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per node allowed per deployment. Defaults to 1. 
@@ -25443,13 +24066,14 @@ def __init__( :paramtype request_timeout: ~datetime.timedelta """ super(OnlineRequestSettings, self).__init__(**kwargs) - self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance', 1) - self.max_queue_wait = kwargs.get('max_queue_wait', "PT0.5S") - self.request_timeout = kwargs.get('request_timeout', "PT5S") + self.max_concurrent_requests_per_instance = kwargs.get("max_concurrent_requests_per_instance", 1) + self.max_queue_wait = kwargs.get("max_queue_wait", "PT0.5S") + self.request_timeout = kwargs.get("request_timeout", "PT5S") -class OpenAIEndpointDeploymentResourceProperties(EndpointDeploymentResourceProperties, - CognitiveServiceEndpointDeploymentResourceProperties): +class OpenAIEndpointDeploymentResourceProperties( + EndpointDeploymentResourceProperties, CognitiveServiceEndpointDeploymentResourceProperties +): """OpenAIEndpointDeploymentResourceProperties. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -25477,25 +24101,22 @@ class OpenAIEndpointDeploymentResourceProperties(EndpointDeploymentResourcePrope """ _validation = { - 'model': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9._]'}, + "model": {"required": True}, + "provisioning_state": {"readonly": True}, + "type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9._]"}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'EndpointDeploymentModel'}, - 'rai_policy_name': {'key': 'raiPolicyName', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'CognitiveServicesSku'}, - 'version_upgrade_option': {'key': 'versionUpgradeOption', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "model": {"key": "model", "type": "EndpointDeploymentModel"}, + "rai_policy_name": {"key": "raiPolicyName", "type": "str"}, + "sku": {"key": "sku", "type": "CognitiveServicesSku"}, + "version_upgrade_option": {"key": "versionUpgradeOption", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. Model used for the endpoint deployment. 
:paramtype model: ~azure.mgmt.machinelearningservices.models.EndpointDeploymentModel @@ -25511,12 +24132,12 @@ def __init__( :paramtype failure_reason: str """ super(OpenAIEndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.model = kwargs['model'] - self.rai_policy_name = kwargs.get('rai_policy_name', None) - self.sku = kwargs.get('sku', None) - self.version_upgrade_option = kwargs.get('version_upgrade_option', None) - self.type = 'Azure.OpenAI' # type: str - self.failure_reason = kwargs.get('failure_reason', None) + self.model = kwargs["model"] + self.rai_policy_name = kwargs.get("rai_policy_name", None) + self.sku = kwargs.get("sku", None) + self.version_upgrade_option = kwargs.get("version_upgrade_option", None) + self.type = "Azure.OpenAI" # type: str + self.failure_reason = kwargs.get("failure_reason", None) self.provisioning_state = None @@ -25547,23 +24168,20 @@ class OpenAIEndpointResourceProperties(EndpointResourceProperties): """ _validation = { - 'endpoint_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'associated_resource_id': {'key': 'associatedResourceId', 'type': 'str'}, - 'endpoint_type': {'key': 'endpointType', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "associated_resource_id": {"key": "associatedResourceId", "type": "str"}, + "endpoint_type": {"key": "endpointType", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword 
associated_resource_id: Byo resource id for creating the built-in model service endpoints. @@ -25576,7 +24194,7 @@ def __init__( :paramtype name: str """ super(OpenAIEndpointResourceProperties, self).__init__(**kwargs) - self.endpoint_type = 'Azure.OpenAI' # type: str + self.endpoint_type = "Azure.OpenAI" # type: str class Operation(msrest.serialization.Model): @@ -25602,24 +24220,21 @@ class Operation(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'is_data_action': {'readonly': True}, - 'origin': {'readonly': True}, - 'action_type': {'readonly': True}, + "name": {"readonly": True}, + "is_data_action": {"readonly": True}, + "origin": {"readonly": True}, + "action_type": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'action_type': {'key': 'actionType', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "is_data_action": {"key": "isDataAction", "type": "bool"}, + "display": {"key": "display", "type": "OperationDisplay"}, + "origin": {"key": "origin", "type": "str"}, + "action_type": {"key": "actionType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword display: Localized display information for this particular operation. 
:paramtype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay @@ -25627,7 +24242,7 @@ def __init__( super(Operation, self).__init__(**kwargs) self.name = None self.is_data_action = None - self.display = kwargs.get('display', None) + self.display = kwargs.get("display", None) self.origin = None self.action_type = None @@ -25652,25 +24267,21 @@ class OperationDisplay(msrest.serialization.Model): """ _validation = { - 'provider': {'readonly': True}, - 'resource': {'readonly': True}, - 'operation': {'readonly': True}, - 'description': {'readonly': True}, + "provider": {"readonly": True}, + "resource": {"readonly": True}, + "operation": {"readonly": True}, + "description": {"readonly": True}, } _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "description": {"key": "description", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource = None @@ -25690,21 +24301,17 @@ class OperationListResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Operation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -25726,16 
+24333,13 @@ class OsPatchingStatus(msrest.serialization.Model): """ _attribute_map = { - 'patch_status': {'key': 'patchStatus', 'type': 'str'}, - 'latest_patch_time': {'key': 'latestPatchTime', 'type': 'str'}, - 'reboot_pending': {'key': 'rebootPending', 'type': 'bool'}, - 'scheduled_reboot_time': {'key': 'scheduledRebootTime', 'type': 'str'}, + "patch_status": {"key": "patchStatus", "type": "str"}, + "latest_patch_time": {"key": "latestPatchTime", "type": "str"}, + "reboot_pending": {"key": "rebootPending", "type": "bool"}, + "scheduled_reboot_time": {"key": "scheduledRebootTime", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword patch_status: The os patching status. Possible values include: "CompletedWithWarnings", "Failed", "InProgress", "Succeeded", "Unknown". @@ -25749,10 +24353,10 @@ def __init__( :paramtype scheduled_reboot_time: str """ super(OsPatchingStatus, self).__init__(**kwargs) - self.patch_status = kwargs.get('patch_status', None) - self.latest_patch_time = kwargs.get('latest_patch_time', None) - self.reboot_pending = kwargs.get('reboot_pending', None) - self.scheduled_reboot_time = kwargs.get('scheduled_reboot_time', None) + self.patch_status = kwargs.get("patch_status", None) + self.latest_patch_time = kwargs.get("latest_patch_time", None) + self.reboot_pending = kwargs.get("reboot_pending", None) + self.scheduled_reboot_time = kwargs.get("scheduled_reboot_time", None) class OutboundRuleBasicResource(Resource): @@ -25779,32 +24383,29 @@ class OutboundRuleBasicResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 
'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'OutboundRule'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "OutboundRule"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. Outbound Rule for the managed network of a machine learning workspace. :paramtype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule """ super(OutboundRuleBasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class OutboundRuleListResult(msrest.serialization.Model): @@ -25819,14 +24420,11 @@ class OutboundRuleListResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OutboundRuleBasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OutboundRuleBasicResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. 
@@ -25836,8 +24434,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] """ super(OutboundRuleListResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class OutputPathAssetReference(AssetReferenceBase): @@ -25855,19 +24453,16 @@ class OutputPathAssetReference(AssetReferenceBase): """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "job_id": {"key": "jobId", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword job_id: ARM resource ID of the job. 
:paramtype job_id: str @@ -25875,9 +24470,9 @@ def __init__( :paramtype path: str """ super(OutputPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'OutputPath' # type: str - self.job_id = kwargs.get('job_id', None) - self.path = kwargs.get('path', None) + self.reference_type = "OutputPath" # type: str + self.job_id = kwargs.get("job_id", None) + self.path = kwargs.get("path", None) class PackageInputPathBase(msrest.serialization.Model): @@ -25894,24 +24489,23 @@ class PackageInputPathBase(msrest.serialization.Model): """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, } _subtype_map = { - 'input_path_type': {'PathId': 'PackageInputPathId', 'PathVersion': 'PackageInputPathVersion', - 'Url': 'PackageInputPathUrl'} + "input_path_type": { + "PathId": "PackageInputPathId", + "PathVersion": "PackageInputPathVersion", + "Url": "PackageInputPathUrl", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(PackageInputPathBase, self).__init__(**kwargs) self.input_path_type = None # type: Optional[str] @@ -25929,25 +24523,22 @@ class PackageInputPathId(PackageInputPathBase): """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_id: Input resource id. 
:paramtype resource_id: str """ super(PackageInputPathId, self).__init__(**kwargs) - self.input_path_type = 'PathId' # type: str - self.resource_id = kwargs.get('resource_id', None) + self.input_path_type = "PathId" # type: str + self.resource_id = kwargs.get("resource_id", None) class PackageInputPathUrl(PackageInputPathBase): @@ -25963,25 +24554,22 @@ class PackageInputPathUrl(PackageInputPathBase): """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'url': {'key': 'url', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "url": {"key": "url", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword url: Input path url. :paramtype url: str """ super(PackageInputPathUrl, self).__init__(**kwargs) - self.input_path_type = 'Url' # type: str - self.url = kwargs.get('url', None) + self.input_path_type = "Url" # type: str + self.url = kwargs.get("url", None) class PackageInputPathVersion(PackageInputPathBase): @@ -25999,19 +24587,16 @@ class PackageInputPathVersion(PackageInputPathBase): """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_name': {'key': 'resourceName', 'type': 'str'}, - 'resource_version': {'key': 'resourceVersion', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_name": {"key": "resourceName", "type": "str"}, + "resource_version": {"key": "resourceVersion", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword resource_name: Input resource name. 
:paramtype resource_name: str @@ -26019,9 +24604,9 @@ def __init__( :paramtype resource_version: str """ super(PackageInputPathVersion, self).__init__(**kwargs) - self.input_path_type = 'PathVersion' # type: str - self.resource_name = kwargs.get('resource_name', None) - self.resource_version = kwargs.get('resource_version', None) + self.input_path_type = "PathVersion" # type: str + self.resource_name = kwargs.get("resource_name", None) + self.resource_version = kwargs.get("resource_version", None) class PackageRequest(msrest.serialization.Model): @@ -26050,25 +24635,22 @@ class PackageRequest(msrest.serialization.Model): """ _validation = { - 'inferencing_server': {'required': True}, - 'target_environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "inferencing_server": {"required": True}, + "target_environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": 
"tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword base_environment_source: Base environment to start with. :paramtype base_environment_source: @@ -26090,14 +24672,14 @@ def __init__( :paramtype target_environment_id: str """ super(PackageRequest, self).__init__(**kwargs) - self.base_environment_source = kwargs.get('base_environment_source', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.inferencing_server = kwargs['inferencing_server'] - self.inputs = kwargs.get('inputs', None) - self.model_configuration = kwargs.get('model_configuration', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.target_environment_id = kwargs['target_environment_id'] + self.base_environment_source = kwargs.get("base_environment_source", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.inferencing_server = kwargs["inferencing_server"] + self.inputs = kwargs.get("inputs", None) + self.model_configuration = kwargs.get("model_configuration", None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) + self.target_environment_id = kwargs["target_environment_id"] class PackageResponse(msrest.serialization.Model): @@ -26132,39 +24714,35 @@ class PackageResponse(msrest.serialization.Model): """ _validation = { - 'base_environment_source': {'readonly': True}, - 'build_id': {'readonly': True}, - 'build_state': {'readonly': True}, - 'environment_variables': {'readonly': True}, - 'inferencing_server': {'readonly': True}, - 'inputs': {'readonly': True}, - 'log_url': {'readonly': True}, - 'model_configuration': {'readonly': True}, - 'properties': {'readonly': True}, - 'tags': {'readonly': True}, - 'target_environment_id': {'readonly': True}, + "base_environment_source": {"readonly": True}, + "build_id": 
{"readonly": True}, + "build_state": {"readonly": True}, + "environment_variables": {"readonly": True}, + "inferencing_server": {"readonly": True}, + "inputs": {"readonly": True}, + "log_url": {"readonly": True}, + "model_configuration": {"readonly": True}, + "properties": {"readonly": True}, + "tags": {"readonly": True}, + "target_environment_id": {"readonly": True}, } _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'build_id': {'key': 'buildId', 'type': 'str'}, - 'build_state': {'key': 'buildState', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'log_url': {'key': 'logUrl', 'type': 'str'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "build_id": {"key": "buildId", "type": "str"}, + "build_state": {"key": "buildState", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "log_url": {"key": "logUrl", "type": "str"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(PackageResponse, self).__init__(**kwargs) 
self.base_environment_source = None self.build_id = None @@ -26189,14 +24767,11 @@ class PaginatedComputeResourcesList(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ComputeResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: An array of Machine Learning compute objects wrapped in ARM resource envelope. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] @@ -26204,8 +24779,8 @@ def __init__( :paramtype next_link: str """ super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get("value", None) + self.next_link = kwargs.get("next_link", None) class PartialBatchDeployment(msrest.serialization.Model): @@ -26216,19 +24791,16 @@ class PartialBatchDeployment(msrest.serialization.Model): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: Description of the endpoint deployment. 
:paramtype description: str """ super(PartialBatchDeployment, self).__init__(**kwargs) - self.description = kwargs.get('description', None) + self.description = kwargs.get("description", None) class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(msrest.serialization.Model): @@ -26241,14 +24813,11 @@ class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(msrest.s """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "properties": {"key": "properties", "type": "PartialBatchDeployment"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment @@ -26256,8 +24825,8 @@ def __init__( :paramtype tags: dict[str, str] """ super(PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) + self.properties = kwargs.get("properties", None) + self.tags = kwargs.get("tags", None) class PartialJobBase(msrest.serialization.Model): @@ -26269,20 +24838,17 @@ class PartialJobBase(msrest.serialization.Model): """ _attribute_map = { - 'notification_setting': {'key': 'notificationSetting', 'type': 'PartialNotificationSetting'}, + "notification_setting": {"key": "notificationSetting", "type": "PartialNotificationSetting"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_setting: Mutable notification setting for the job. 
:paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting """ super(PartialJobBase, self).__init__(**kwargs) - self.notification_setting = kwargs.get('notification_setting', None) + self.notification_setting = kwargs.get("notification_setting", None) class PartialJobBasePartialResource(msrest.serialization.Model): @@ -26293,19 +24859,16 @@ class PartialJobBasePartialResource(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialJobBase'}, + "properties": {"key": "properties", "type": "PartialJobBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase """ super(PartialJobBasePartialResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class PartialManagedServiceIdentity(msrest.serialization.Model): @@ -26323,14 +24886,11 @@ class PartialManagedServiceIdentity(msrest.serialization.Model): """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{object}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword type: Managed service identity (system assigned and/or user assigned identities). 
Possible values include: "None", "SystemAssigned", "UserAssigned", @@ -26343,8 +24903,8 @@ def __init__( :paramtype user_assigned_identities: dict[str, any] """ super(PartialManagedServiceIdentity, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + self.type = kwargs.get("type", None) + self.user_assigned_identities = kwargs.get("user_assigned_identities", None) class PartialMinimalTrackedResource(msrest.serialization.Model): @@ -26355,19 +24915,16 @@ class PartialMinimalTrackedResource(msrest.serialization.Model): """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] """ super(PartialMinimalTrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) + self.tags = kwargs.get("tags", None) class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): @@ -26380,14 +24937,11 @@ class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. 
:paramtype tags: dict[str, str] @@ -26395,7 +24949,7 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity """ super(PartialMinimalTrackedResourceWithIdentity, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) + self.identity = kwargs.get("identity", None) class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): @@ -26408,14 +24962,11 @@ class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "PartialSku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -26423,7 +24974,7 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku """ super(PartialMinimalTrackedResourceWithSku, self).__init__(**kwargs) - self.sku = kwargs.get('sku', None) + self.sku = kwargs.get("sku", None) class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResource): @@ -26438,15 +24989,12 @@ class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResou """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "PartialSku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. 
:paramtype tags: dict[str, str] @@ -26456,8 +25004,8 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku """ super(PartialMinimalTrackedResourceWithSkuAndIdentity, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.sku = kwargs.get("sku", None) class PartialNotificationSetting(msrest.serialization.Model): @@ -26469,20 +25017,17 @@ class PartialNotificationSetting(msrest.serialization.Model): """ _attribute_map = { - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the webhook. :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ super(PartialNotificationSetting, self).__init__(**kwargs) - self.webhooks = kwargs.get('webhooks', None) + self.webhooks = kwargs.get("webhooks", None) class PartialRegistryPartialTrackedResource(msrest.serialization.Model): @@ -26498,15 +25043,12 @@ class PartialRegistryPartialTrackedResource(msrest.serialization.Model): """ _attribute_map = { - 'identity': {'key': 'identity', 'type': 'RegistryPartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "identity": {"key": "identity", "type": "RegistryPartialManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "PartialSku"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: @@ -26517,9 +25059,9 @@ def __init__( :paramtype tags: dict[str, str] """ super(PartialRegistryPartialTrackedResource, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) + self.identity = kwargs.get("identity", None) + self.sku = kwargs.get("sku", None) + self.tags = kwargs.get("tags", None) class PartialSku(msrest.serialization.Model): @@ -26543,17 +25085,14 @@ class PartialSku(msrest.serialization.Model): """ _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'int'}, - 'family': {'key': 'family', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "capacity": {"key": "capacity", "type": "int"}, + "family": {"key": "family", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword capacity: If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. 
@@ -26572,11 +25111,11 @@ def __init__( :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ super(PartialSku, self).__init__(**kwargs) - self.capacity = kwargs.get('capacity', None) - self.family = kwargs.get('family', None) - self.name = kwargs.get('name', None) - self.size = kwargs.get('size', None) - self.tier = kwargs.get('tier', None) + self.capacity = kwargs.get("capacity", None) + self.family = kwargs.get("family", None) + self.name = kwargs.get("name", None) + self.size = kwargs.get("size", None) + self.tier = kwargs.get("tier", None) class Password(msrest.serialization.Model): @@ -26591,21 +25130,17 @@ class Password(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, + "name": {"readonly": True}, + "value": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(Password, self).__init__(**kwargs) self.name = None self.value = None @@ -26661,28 +25196,25 @@ class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 
'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionPersonalAccessToken'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionPersonalAccessToken"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -26717,8 +25249,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken """ super(PATAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'PAT' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "PAT" # type: str + self.credentials = kwargs.get("credentials", None) class PendingUploadCredentialDto(msrest.serialization.Model): @@ -26736,23 +25268,17 @@ class PendingUploadCredentialDto(msrest.serialization.Model): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, } - _subtype_map = { - 'credential_type': {'SAS': 'SASCredentialDto'} - } + _subtype_map = {"credential_type": {"SAS": "SASCredentialDto"}} - def __init__( - self, 
- **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(PendingUploadCredentialDto, self).__init__(**kwargs) self.credential_type = None # type: Optional[str] @@ -26769,14 +25295,11 @@ class PendingUploadRequestDto(msrest.serialization.Model): """ _attribute_map = { - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, + "pending_upload_id": {"key": "pendingUploadId", "type": "str"}, + "pending_upload_type": {"key": "pendingUploadType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword pending_upload_id: If PendingUploadId = null then random guid will be used. :paramtype pending_upload_id: str @@ -26786,8 +25309,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.PendingUploadType """ super(PendingUploadRequestDto, self).__init__(**kwargs) - self.pending_upload_id = kwargs.get('pending_upload_id', None) - self.pending_upload_type = kwargs.get('pending_upload_type', None) + self.pending_upload_id = kwargs.get("pending_upload_id", None) + self.pending_upload_type = kwargs.get("pending_upload_type", None) class PendingUploadResponseDto(msrest.serialization.Model): @@ -26805,16 +25328,15 @@ class PendingUploadResponseDto(msrest.serialization.Model): """ _attribute_map = { - 'blob_reference_for_consumption': {'key': 'blobReferenceForConsumption', - 'type': 'BlobReferenceForConsumptionDto'}, - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, + "blob_reference_for_consumption": { + "key": "blobReferenceForConsumption", + "type": "BlobReferenceForConsumptionDto", + }, + "pending_upload_id": {"key": "pendingUploadId", "type": "str"}, + "pending_upload_type": {"key": "pendingUploadType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword blob_reference_for_consumption: Container 
level read, write, list SAS. :paramtype blob_reference_for_consumption: @@ -26827,9 +25349,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.PendingUploadType """ super(PendingUploadResponseDto, self).__init__(**kwargs) - self.blob_reference_for_consumption = kwargs.get('blob_reference_for_consumption', None) - self.pending_upload_id = kwargs.get('pending_upload_id', None) - self.pending_upload_type = kwargs.get('pending_upload_type', None) + self.blob_reference_for_consumption = kwargs.get("blob_reference_for_consumption", None) + self.pending_upload_id = kwargs.get("pending_upload_id", None) + self.pending_upload_type = kwargs.get("pending_upload_type", None) class PersonalComputeInstanceSettings(msrest.serialization.Model): @@ -26840,19 +25362,16 @@ class PersonalComputeInstanceSettings(msrest.serialization.Model): """ _attribute_map = { - 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, + "assigned_user": {"key": "assignedUser", "type": "AssignedUser"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword assigned_user: A user explicitly assigned to a personal compute instance. 
:paramtype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser """ super(PersonalComputeInstanceSettings, self).__init__(**kwargs) - self.assigned_user = kwargs.get('assigned_user', None) + self.assigned_user = kwargs.get("assigned_user", None) class PipelineJob(JobBaseProperties): @@ -26912,36 +25431,33 @@ class PipelineJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jobs': {'key': 'jobs', 'type': '{object}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'settings': {'key': 'settings', 'type': 'object'}, - 'source_job_id': {'key': 'sourceJobId', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": 
{"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jobs": {"key": "jobs", "type": "{object}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "settings": {"key": "settings", "type": "object"}, + "source_job_id": {"key": "sourceJobId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -26984,12 +25500,12 @@ def __init__( :paramtype source_job_id: str """ super(PipelineJob, self).__init__(**kwargs) - self.job_type = 'Pipeline' # type: str - self.inputs = kwargs.get('inputs', None) - self.jobs = kwargs.get('jobs', None) - self.outputs = kwargs.get('outputs', None) - self.settings = kwargs.get('settings', None) - self.source_job_id = kwargs.get('source_job_id', None) + self.job_type = "Pipeline" # type: str + self.inputs = kwargs.get("inputs", None) + self.jobs = kwargs.get("jobs", None) + self.outputs = kwargs.get("outputs", None) + self.settings = kwargs.get("settings", None) + self.source_job_id = kwargs.get("source_job_id", None) class PoolEnvironmentConfiguration(msrest.serialization.Model): @@ -27011,17 +25527,14 @@ class PoolEnvironmentConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'readiness_probe': 
{'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'startup_probe': {'key': 'startupProbe', 'type': 'ProbeSettings'}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "startup_probe": {"key": "startupProbe", "type": "ProbeSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword environment_id: ARM resource ID of the environment specification for the inference pool. @@ -27039,11 +25552,11 @@ def __init__( :paramtype startup_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings """ super(PoolEnvironmentConfiguration, self).__init__(**kwargs) - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.liveness_probe = kwargs.get('liveness_probe', None) - self.readiness_probe = kwargs.get('readiness_probe', None) - self.startup_probe = kwargs.get('startup_probe', None) + self.environment_id = kwargs.get("environment_id", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.liveness_probe = kwargs.get("liveness_probe", None) + self.readiness_probe = kwargs.get("readiness_probe", None) + self.startup_probe = kwargs.get("startup_probe", None) class PoolModelConfiguration(msrest.serialization.Model): @@ -27054,19 +25567,16 @@ class PoolModelConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'model_id': {'key': 'modelId', 'type': 'str'}, + "model_id": {"key": "modelId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model_id: The URI path to the model. 
:paramtype model_id: str """ super(PoolModelConfiguration, self).__init__(**kwargs) - self.model_id = kwargs.get('model_id', None) + self.model_id = kwargs.get("model_id", None) class PoolStatus(msrest.serialization.Model): @@ -27084,16 +25594,13 @@ class PoolStatus(msrest.serialization.Model): """ _attribute_map = { - 'actual_capacity': {'key': 'actualCapacity', 'type': 'int'}, - 'group_count': {'key': 'groupCount', 'type': 'int'}, - 'requested_capacity': {'key': 'requestedCapacity', 'type': 'int'}, - 'reserved_capacity': {'key': 'reservedCapacity', 'type': 'int'}, + "actual_capacity": {"key": "actualCapacity", "type": "int"}, + "group_count": {"key": "groupCount", "type": "int"}, + "requested_capacity": {"key": "requestedCapacity", "type": "int"}, + "reserved_capacity": {"key": "reservedCapacity", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword actual_capacity: Gets or sets the actual number of instances in the pool. :paramtype actual_capacity: int @@ -27106,10 +25613,10 @@ def __init__( :paramtype reserved_capacity: int """ super(PoolStatus, self).__init__(**kwargs) - self.actual_capacity = kwargs.get('actual_capacity', 0) - self.group_count = kwargs.get('group_count', 0) - self.requested_capacity = kwargs.get('requested_capacity', 0) - self.reserved_capacity = kwargs.get('reserved_capacity', 0) + self.actual_capacity = kwargs.get("actual_capacity", 0) + self.group_count = kwargs.get("group_count", 0) + self.requested_capacity = kwargs.get("requested_capacity", 0) + self.reserved_capacity = kwargs.get("reserved_capacity", 0) class PredictionDriftMonitoringSignal(MonitoringSignalBase): @@ -27142,26 +25649,23 @@ class PredictionDriftMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": 
{"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'notification_types': {'key': 'notificationTypes', 'type': '[str]'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[PredictionDriftMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "notification_types": {"key": "notificationTypes", "type": "[str]"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[PredictionDriftMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword notification_types: The current notification mode for this signal. 
:paramtype notification_types: list[str or @@ -27182,11 +25686,11 @@ def __init__( :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ super(PredictionDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'PredictionDrift' # type: str - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] + self.signal_type = "PredictionDrift" # type: str + self.feature_data_type_override = kwargs.get("feature_data_type_override", None) + self.metric_thresholds = kwargs["metric_thresholds"] + self.production_data = kwargs["production_data"] + self.reference_data = kwargs["reference_data"] class PrivateEndpoint(msrest.serialization.Model): @@ -27199,19 +25703,15 @@ class PrivateEndpoint(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(PrivateEndpoint, self).__init__(**kwargs) self.id = None @@ -27254,31 +25754,30 @@ class PrivateEndpointConnection(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': 
{'key': 'tags', 'type': '{str}'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'WorkspacePrivateEndpointResource'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', - 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "WorkspacePrivateEndpointResource"}, + "private_link_service_connection_state": { + "key": "properties.privateLinkServiceConnectionState", + "type": "PrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -27301,13 +25800,13 @@ def __init__( ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState """ super(PrivateEndpointConnection, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - self.provisioning_state = kwargs.get('provisioning_state', None) + self.identity = kwargs.get("identity", None) + self.location = kwargs.get("location", None) + self.sku = kwargs.get("sku", None) + self.tags = kwargs.get("tags", None) + self.private_endpoint = kwargs.get("private_endpoint", None) + self.private_link_service_connection_state = kwargs.get("private_link_service_connection_state", None) + self.provisioning_state = kwargs.get("provisioning_state", None) class PrivateEndpointConnectionListResult(msrest.serialization.Model): @@ -27318,19 +25817,16 @@ class PrivateEndpointConnectionListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + "value": {"key": "value", "type": "[PrivateEndpointConnection]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: Array of private endpoint connections. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] """ super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class PrivateEndpointDestination(msrest.serialization.Model): @@ -27348,16 +25844,13 @@ class PrivateEndpointDestination(msrest.serialization.Model): """ _attribute_map = { - 'service_resource_id': {'key': 'serviceResourceId', 'type': 'str'}, - 'spark_enabled': {'key': 'sparkEnabled', 'type': 'bool'}, - 'spark_status': {'key': 'sparkStatus', 'type': 'str'}, - 'subresource_target': {'key': 'subresourceTarget', 'type': 'str'}, + "service_resource_id": {"key": "serviceResourceId", "type": "str"}, + "spark_enabled": {"key": "sparkEnabled", "type": "bool"}, + "spark_status": {"key": "sparkStatus", "type": "str"}, + "subresource_target": {"key": "subresourceTarget", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword service_resource_id: :paramtype service_resource_id: str @@ -27370,10 +25863,10 @@ def __init__( :paramtype subresource_target: str """ super(PrivateEndpointDestination, self).__init__(**kwargs) - self.service_resource_id = kwargs.get('service_resource_id', None) - self.spark_enabled = kwargs.get('spark_enabled', None) - self.spark_status = kwargs.get('spark_status', None) - self.subresource_target = kwargs.get('subresource_target', None) + self.service_resource_id = kwargs.get("service_resource_id", None) + self.spark_enabled = kwargs.get("spark_enabled", None) + self.spark_status = kwargs.get("spark_status", None) + self.subresource_target = kwargs.get("subresource_target", None) class PrivateEndpointOutboundRule(OutboundRule): @@ -27397,20 +25890,17 @@ class PrivateEndpointOutboundRule(OutboundRule): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': 
{'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'PrivateEndpointDestination'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "PrivateEndpointDestination"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. Possible values include: "Required", "Recommended", "UserDefined". @@ -27423,8 +25913,8 @@ def __init__( :paramtype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination """ super(PrivateEndpointOutboundRule, self).__init__(**kwargs) - self.type = 'PrivateEndpoint' # type: str - self.destination = kwargs.get('destination', None) + self.type = "PrivateEndpoint" # type: str + self.destination = kwargs.get("destination", None) class PrivateEndpointResource(PrivateEndpoint): @@ -27439,24 +25929,21 @@ class PrivateEndpointResource(PrivateEndpoint): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword subnet_arm_id: The subnetId that the private endpoint is connected to. 
:paramtype subnet_arm_id: str """ super(PrivateEndpointResource, self).__init__(**kwargs) - self.subnet_arm_id = kwargs.get('subnet_arm_id', None) + self.subnet_arm_id = kwargs.get("subnet_arm_id", None) class PrivateLinkResource(Resource): @@ -27493,30 +25980,27 @@ class PrivateLinkResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "required_members": {"key": "properties.requiredMembers", "type": "[str]"}, + "required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -27535,13 +26019,13 @@ def __init__( :paramtype required_zone_names: list[str] """ super(PrivateLinkResource, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.group_id = kwargs.get('group_id', None) - self.required_members = kwargs.get('required_members', None) - self.required_zone_names = kwargs.get('required_zone_names', None) + self.identity = kwargs.get("identity", None) + self.location = kwargs.get("location", None) + self.sku = kwargs.get("sku", None) + self.tags = kwargs.get("tags", None) + self.group_id = kwargs.get("group_id", None) + self.required_members = kwargs.get("required_members", None) + self.required_zone_names = kwargs.get("required_zone_names", None) class PrivateLinkResourceListResult(msrest.serialization.Model): @@ -27552,19 +26036,16 @@ class PrivateLinkResourceListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + "value": {"key": "value", "type": "[PrivateLinkResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] """ super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class PrivateLinkServiceConnectionState(msrest.serialization.Model): @@ -27582,15 +26063,12 @@ class PrivateLinkServiceConnectionState(msrest.serialization.Model): """ _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": 
"description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. :paramtype actions_required: str @@ -27603,9 +26081,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.actions_required = kwargs.get('actions_required', None) - self.description = kwargs.get('description', None) - self.status = kwargs.get('status', None) + self.actions_required = kwargs.get("actions_required", None) + self.description = kwargs.get("description", None) + self.status = kwargs.get("status", None) class ProbeSettings(msrest.serialization.Model): @@ -27624,17 +26102,14 @@ class ProbeSettings(msrest.serialization.Model): """ _attribute_map = { - 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, - 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, - 'period': {'key': 'period', 'type': 'duration'}, - 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "failure_threshold": {"key": "failureThreshold", "type": "int"}, + "initial_delay": {"key": "initialDelay", "type": "duration"}, + "period": {"key": "period", "type": "duration"}, + "success_threshold": {"key": "successThreshold", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword failure_threshold: The number of failures to allow before returning an unhealthy status. 
@@ -27649,11 +26124,11 @@ def __init__( :paramtype timeout: ~datetime.timedelta """ super(ProbeSettings, self).__init__(**kwargs) - self.failure_threshold = kwargs.get('failure_threshold', 30) - self.initial_delay = kwargs.get('initial_delay', None) - self.period = kwargs.get('period', "PT10S") - self.success_threshold = kwargs.get('success_threshold', 1) - self.timeout = kwargs.get('timeout', "PT2S") + self.failure_threshold = kwargs.get("failure_threshold", 30) + self.initial_delay = kwargs.get("initial_delay", None) + self.period = kwargs.get("period", "PT10S") + self.success_threshold = kwargs.get("success_threshold", 1) + self.timeout = kwargs.get("timeout", "PT2S") class ProgressMetrics(msrest.serialization.Model): @@ -27673,25 +26148,21 @@ class ProgressMetrics(msrest.serialization.Model): """ _validation = { - 'completed_datapoint_count': {'readonly': True}, - 'incremental_data_last_refresh_date_time': {'readonly': True}, - 'skipped_datapoint_count': {'readonly': True}, - 'total_datapoint_count': {'readonly': True}, + "completed_datapoint_count": {"readonly": True}, + "incremental_data_last_refresh_date_time": {"readonly": True}, + "skipped_datapoint_count": {"readonly": True}, + "total_datapoint_count": {"readonly": True}, } _attribute_map = { - 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'}, - 'incremental_data_last_refresh_date_time': {'key': 'incrementalDataLastRefreshDateTime', 'type': 'iso-8601'}, - 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'}, - 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'}, + "completed_datapoint_count": {"key": "completedDatapointCount", "type": "long"}, + "incremental_data_last_refresh_date_time": {"key": "incrementalDataLastRefreshDateTime", "type": "iso-8601"}, + "skipped_datapoint_count": {"key": "skippedDatapointCount", "type": "long"}, + "total_datapoint_count": {"key": "totalDatapointCount", "type": "long"}, } - def __init__( - self, - 
**kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ProgressMetrics, self).__init__(**kwargs) self.completed_datapoint_count = None self.incremental_data_last_refresh_date_time = None @@ -27713,25 +26184,22 @@ class PyTorch(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword process_count_per_instance: Number of processes per node. :paramtype process_count_per_instance: int """ super(PyTorch, self).__init__(**kwargs) - self.distribution_type = 'PyTorch' # type: str - self.process_count_per_instance = kwargs.get('process_count_per_instance', None) + self.distribution_type = "PyTorch" # type: str + self.process_count_per_instance = kwargs.get("process_count_per_instance", None) class QueueSettings(msrest.serialization.Model): @@ -27745,14 +26213,11 @@ class QueueSettings(msrest.serialization.Model): """ _attribute_map = { - 'job_tier': {'key': 'jobTier', 'type': 'str'}, - 'priority': {'key': 'priority', 'type': 'int'}, + "job_tier": {"key": "jobTier", "type": "str"}, + "priority": {"key": "priority", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword job_tier: Controls the compute job tier. Possible values include: "Null", "Spot", "Basic", "Standard", "Premium". 
@@ -27761,8 +26226,8 @@ def __init__( :paramtype priority: int """ super(QueueSettings, self).__init__(**kwargs) - self.job_tier = kwargs.get('job_tier', None) - self.priority = kwargs.get('priority', None) + self.job_tier = kwargs.get("job_tier", None) + self.priority = kwargs.get("priority", None) class QuotaBaseProperties(msrest.serialization.Model): @@ -27779,16 +26244,13 @@ class QuotaBaseProperties(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "long"}, + "unit": {"key": "unit", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: Specifies the resource ID. :paramtype id: str @@ -27801,10 +26263,10 @@ def __init__( :paramtype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit """ super(QuotaBaseProperties, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.type = kwargs.get('type', None) - self.limit = kwargs.get('limit', None) - self.unit = kwargs.get('unit', None) + self.id = kwargs.get("id", None) + self.type = kwargs.get("type", None) + self.limit = kwargs.get("limit", None) + self.unit = kwargs.get("unit", None) class QuotaUpdateParameters(msrest.serialization.Model): @@ -27817,14 +26279,11 @@ class QuotaUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, - 'location': {'key': 'location', 'type': 'str'}, + "value": {"key": "value", "type": "[QuotaBaseProperties]"}, + "location": {"key": "location", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: The list for update quota. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] @@ -27832,8 +26291,8 @@ def __init__( :paramtype location: str """ super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.location = kwargs.get('location', None) + self.value = kwargs.get("value", None) + self.location = kwargs.get("location", None) class RandomSamplingAlgorithm(SamplingAlgorithm): @@ -27856,20 +26315,17 @@ class RandomSamplingAlgorithm(SamplingAlgorithm): """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - 'logbase': {'key': 'logbase', 'type': 'str'}, - 'rule': {'key': 'rule', 'type': 'str'}, - 'seed': {'key': 'seed', 'type': 'int'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + "logbase": {"key": "logbase", "type": "str"}, + "rule": {"key": "rule", "type": "str"}, + "seed": {"key": "seed", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword logbase: An optional positive number or e in string format to be used as base for log based random sampling. 
@@ -27881,10 +26337,10 @@ def __init__( :paramtype seed: int """ super(RandomSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Random' # type: str - self.logbase = kwargs.get('logbase', None) - self.rule = kwargs.get('rule', None) - self.seed = kwargs.get('seed', None) + self.sampling_algorithm_type = "Random" # type: str + self.logbase = kwargs.get("logbase", None) + self.rule = kwargs.get("rule", None) + self.seed = kwargs.get("seed", None) class Ray(DistributionConfiguration): @@ -27911,23 +26367,20 @@ class Ray(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'address': {'key': 'address', 'type': 'str'}, - 'dashboard_port': {'key': 'dashboardPort', 'type': 'int'}, - 'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'}, - 'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'}, - 'port': {'key': 'port', 'type': 'int'}, - 'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "address": {"key": "address", "type": "str"}, + "dashboard_port": {"key": "dashboardPort", "type": "int"}, + "head_node_additional_args": {"key": "headNodeAdditionalArgs", "type": "str"}, + "include_dashboard": {"key": "includeDashboard", "type": "bool"}, + "port": {"key": "port", "type": "int"}, + "worker_node_additional_args": {"key": "workerNodeAdditionalArgs", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword address: The address of Ray head node. 
:paramtype address: str @@ -27943,13 +26396,13 @@ def __init__( :paramtype worker_node_additional_args: str """ super(Ray, self).__init__(**kwargs) - self.distribution_type = 'Ray' # type: str - self.address = kwargs.get('address', None) - self.dashboard_port = kwargs.get('dashboard_port', None) - self.head_node_additional_args = kwargs.get('head_node_additional_args', None) - self.include_dashboard = kwargs.get('include_dashboard', None) - self.port = kwargs.get('port', None) - self.worker_node_additional_args = kwargs.get('worker_node_additional_args', None) + self.distribution_type = "Ray" # type: str + self.address = kwargs.get("address", None) + self.dashboard_port = kwargs.get("dashboard_port", None) + self.head_node_additional_args = kwargs.get("head_node_additional_args", None) + self.include_dashboard = kwargs.get("include_dashboard", None) + self.port = kwargs.get("port", None) + self.worker_node_additional_args = kwargs.get("worker_node_additional_args", None) class Recurrence(msrest.serialization.Model): @@ -27972,17 +26425,14 @@ class Recurrence(msrest.serialization.Model): """ _attribute_map = { - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'ComputeRecurrenceSchedule'}, + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "schedule": {"key": "schedule", "type": "ComputeRecurrenceSchedule"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword frequency: [Required] The frequency to trigger schedule. Possible values include: "Minute", "Hour", "Day", "Week", "Month". 
@@ -28000,11 +26450,11 @@ def __init__( :paramtype schedule: ~azure.mgmt.machinelearningservices.models.ComputeRecurrenceSchedule """ super(Recurrence, self).__init__(**kwargs) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") - self.schedule = kwargs.get('schedule', None) + self.frequency = kwargs.get("frequency", None) + self.interval = kwargs.get("interval", None) + self.start_time = kwargs.get("start_time", None) + self.time_zone = kwargs.get("time_zone", "UTC") + self.schedule = kwargs.get("schedule", None) class RecurrenceSchedule(msrest.serialization.Model): @@ -28023,21 +26473,18 @@ class RecurrenceSchedule(msrest.serialization.Model): """ _validation = { - 'hours': {'required': True}, - 'minutes': {'required': True}, + "hours": {"required": True}, + "minutes": {"required": True}, } _attribute_map = { - 'hours': {'key': 'hours', 'type': '[int]'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, + "hours": {"key": "hours", "type": "[int]"}, + "minutes": {"key": "minutes", "type": "[int]"}, + "month_days": {"key": "monthDays", "type": "[int]"}, + "week_days": {"key": "weekDays", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword hours: Required. [Required] List of hours for the schedule. 
:paramtype hours: list[int] @@ -28049,10 +26496,10 @@ def __init__( :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay] """ super(RecurrenceSchedule, self).__init__(**kwargs) - self.hours = kwargs['hours'] - self.minutes = kwargs['minutes'] - self.month_days = kwargs.get('month_days', None) - self.week_days = kwargs.get('week_days', None) + self.hours = kwargs["hours"] + self.minutes = kwargs["minutes"] + self.month_days = kwargs.get("month_days", None) + self.week_days = kwargs.get("week_days", None) class RecurrenceTrigger(TriggerBase): @@ -28085,25 +26532,22 @@ class RecurrenceTrigger(TriggerBase): """ _validation = { - 'trigger_type': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, + "trigger_type": {"required": True}, + "frequency": {"required": True}, + "interval": {"required": True}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "schedule": {"key": "schedule", "type": "RecurrenceSchedule"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. 
@@ -28127,10 +26571,10 @@ def __init__( :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule """ super(RecurrenceTrigger, self).__init__(**kwargs) - self.trigger_type = 'Recurrence' # type: str - self.frequency = kwargs['frequency'] - self.interval = kwargs['interval'] - self.schedule = kwargs.get('schedule', None) + self.trigger_type = "Recurrence" # type: str + self.frequency = kwargs["frequency"] + self.interval = kwargs["interval"] + self.schedule = kwargs.get("schedule", None) class RegenerateEndpointKeysRequest(msrest.serialization.Model): @@ -28146,18 +26590,15 @@ class RegenerateEndpointKeysRequest(msrest.serialization.Model): """ _validation = { - 'key_type': {'required': True}, + "key_type": {"required": True}, } _attribute_map = { - 'key_type': {'key': 'keyType', 'type': 'str'}, - 'key_value': {'key': 'keyValue', 'type': 'str'}, + "key_type": {"key": "keyType", "type": "str"}, + "key_value": {"key": "keyValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key_type: Required. [Required] Specification for which type of key to generate. Primary or Secondary. Possible values include: "Primary", "Secondary". @@ -28166,8 +26607,8 @@ def __init__( :paramtype key_value: str """ super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) - self.key_type = kwargs['key_type'] - self.key_value = kwargs.get('key_value', None) + self.key_type = kwargs["key_type"] + self.key_value = kwargs.get("key_value", None) class RegenerateServiceAccountKeyContent(msrest.serialization.Model): @@ -28178,19 +26619,16 @@ class RegenerateServiceAccountKeyContent(msrest.serialization.Model): """ _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, + "key_name": {"key": "keyName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key_name: Possible values include: "Key1", "Key2". 
:paramtype key_name: str or ~azure.mgmt.machinelearningservices.models.ServiceAccountKeyName """ super(RegenerateServiceAccountKeyContent, self).__init__(**kwargs) - self.key_name = kwargs.get('key_name', None) + self.key_name = kwargs.get("key_name", None) class Registry(TrackedResource): @@ -28247,39 +26685,40 @@ class Registry(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'intellectual_property_publisher': {'key': 'properties.intellectualPropertyPublisher', 'type': 'str'}, - 'managed_resource_group': {'key': 'properties.managedResourceGroup', 'type': 'ArmResourceId'}, - 'managed_resource_group_settings': {'key': 'properties.managedResourceGroupSettings', - 'type': 'ManagedResourceGroupSettings'}, - 'ml_flow_registry_uri': {'key': 'properties.mlFlowRegistryUri', 'type': 'str'}, - 'registry_private_endpoint_connections': {'key': 'properties.registryPrivateEndpointConnections', - 'type': '[RegistryPrivateEndpointConnection]'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'region_details': {'key': 'properties.regionDetails', 'type': '[RegistryRegionArmDetails]'}, + "id": {"key": "id", "type": "str"}, + 
"name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, + "intellectual_property_publisher": {"key": "properties.intellectualPropertyPublisher", "type": "str"}, + "managed_resource_group": {"key": "properties.managedResourceGroup", "type": "ArmResourceId"}, + "managed_resource_group_settings": { + "key": "properties.managedResourceGroupSettings", + "type": "ManagedResourceGroupSettings", + }, + "ml_flow_registry_uri": {"key": "properties.mlFlowRegistryUri", "type": "str"}, + "registry_private_endpoint_connections": { + "key": "properties.registryPrivateEndpointConnections", + "type": "[RegistryPrivateEndpointConnection]", + }, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "region_details": {"key": "properties.regionDetails", "type": "[RegistryRegionArmDetails]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. 
:paramtype tags: dict[str, str] @@ -28316,17 +26755,17 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] """ super(Registry, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.sku = kwargs.get('sku', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None) - self.managed_resource_group = kwargs.get('managed_resource_group', None) - self.managed_resource_group_settings = kwargs.get('managed_resource_group_settings', None) - self.ml_flow_registry_uri = kwargs.get('ml_flow_registry_uri', None) - self.registry_private_endpoint_connections = kwargs.get('registry_private_endpoint_connections', None) - self.public_network_access = kwargs.get('public_network_access', None) - self.region_details = kwargs.get('region_details', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.sku = kwargs.get("sku", None) + self.discovery_url = kwargs.get("discovery_url", None) + self.intellectual_property_publisher = kwargs.get("intellectual_property_publisher", None) + self.managed_resource_group = kwargs.get("managed_resource_group", None) + self.managed_resource_group_settings = kwargs.get("managed_resource_group_settings", None) + self.ml_flow_registry_uri = kwargs.get("ml_flow_registry_uri", None) + self.registry_private_endpoint_connections = kwargs.get("registry_private_endpoint_connections", None) + self.public_network_access = kwargs.get("public_network_access", None) + self.region_details = kwargs.get("region_details", None) class RegistryListCredentialsResult(msrest.serialization.Model): @@ -28343,27 +26782,24 @@ class RegistryListCredentialsResult(msrest.serialization.Model): """ _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, + "location": {"readonly": True}, + "username": {"readonly": 
True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, - 'username': {'key': 'username', 'type': 'str'}, + "location": {"key": "location", "type": "str"}, + "passwords": {"key": "passwords", "type": "[Password]"}, + "username": {"key": "username", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword passwords: :paramtype passwords: list[~azure.mgmt.machinelearningservices.models.Password] """ super(RegistryListCredentialsResult, self).__init__(**kwargs) self.location = None - self.passwords = kwargs.get('passwords', None) + self.passwords = kwargs.get("passwords", None) self.username = None @@ -28393,22 +26829,19 @@ class RegistryPartialManagedServiceIdentity(ManagedServiceIdentity): """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword type: Required. Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). 
Possible values include: "None", "SystemAssigned", @@ -28446,20 +26879,18 @@ class RegistryPrivateEndpointConnection(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpointResource'}, - 'registry_private_link_service_connection_state': { - 'key': 'properties.registryPrivateLinkServiceConnectionState', - 'type': 'RegistryPrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "group_ids": {"key": "properties.groupIds", "type": "[str]"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpointResource"}, + "registry_private_link_service_connection_state": { + "key": "properties.registryPrivateLinkServiceConnectionState", + "type": "RegistryPrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: This is the private endpoint connection name created on SRP Full resource id: @@ -28479,13 +26910,14 @@ def __init__( :paramtype provisioning_state: str """ super(RegistryPrivateEndpointConnection, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.location = kwargs.get('location', None) - self.group_ids = kwargs.get('group_ids', None) - self.private_endpoint = kwargs.get('private_endpoint', None) + self.id = kwargs.get("id", None) + self.location = kwargs.get("location", None) + self.group_ids = kwargs.get("group_ids", None) + self.private_endpoint = kwargs.get("private_endpoint", None) self.registry_private_link_service_connection_state = kwargs.get( - 'registry_private_link_service_connection_state', 
None) - self.provisioning_state = kwargs.get('provisioning_state', None) + "registry_private_link_service_connection_state", None + ) + self.provisioning_state = kwargs.get("provisioning_state", None) class RegistryPrivateLinkServiceConnectionState(msrest.serialization.Model): @@ -28503,15 +26935,12 @@ class RegistryPrivateLinkServiceConnectionState(msrest.serialization.Model): """ _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. :paramtype actions_required: str @@ -28524,9 +26953,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ super(RegistryPrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.actions_required = kwargs.get('actions_required', None) - self.description = kwargs.get('description', None) - self.status = kwargs.get('status', None) + self.actions_required = kwargs.get("actions_required", None) + self.description = kwargs.get("description", None) + self.status = kwargs.get("status", None) class RegistryRegionArmDetails(msrest.serialization.Model): @@ -28542,15 +26971,12 @@ class RegistryRegionArmDetails(msrest.serialization.Model): """ _attribute_map = { - 'acr_details': {'key': 'acrDetails', 'type': '[AcrDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, - 'storage_account_details': {'key': 'storageAccountDetails', 'type': '[StorageAccountDetails]'}, + "acr_details": {"key": "acrDetails", "type": "[AcrDetails]"}, + "location": {"key": "location", "type": "str"}, + "storage_account_details": {"key": 
"storageAccountDetails", "type": "[StorageAccountDetails]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword acr_details: List of ACR accounts. :paramtype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] @@ -28561,9 +26987,9 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] """ super(RegistryRegionArmDetails, self).__init__(**kwargs) - self.acr_details = kwargs.get('acr_details', None) - self.location = kwargs.get('location', None) - self.storage_account_details = kwargs.get('storage_account_details', None) + self.acr_details = kwargs.get("acr_details", None) + self.location = kwargs.get("location", None) + self.storage_account_details = kwargs.get("storage_account_details", None) class RegistryTrackedResourceArmPaginatedResult(msrest.serialization.Model): @@ -28577,14 +27003,11 @@ class RegistryTrackedResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Registry]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Registry]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of Registry objects. If null, there are no additional pages. 
@@ -28593,8 +27016,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.Registry] """ super(RegistryTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class Regression(AutoMLVertical, TableVertical): @@ -28661,35 +27084,32 @@ class Regression(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 
'RegressionTrainingSettings'}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "RegressionTrainingSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cv_split_column_names: Columns to use for CVSplit data. 
:paramtype cv_split_column_names: list[str] @@ -28747,24 +27167,24 @@ def __init__( ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings """ super(Regression, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Regression' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.cv_split_column_names = kwargs.get("cv_split_column_names", None) + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.n_cross_validations = kwargs.get("n_cross_validations", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.test_data = kwargs.get("test_data", None) + self.test_data_size = kwargs.get("test_data_size", None) + self.validation_data = kwargs.get("validation_data", None) + self.validation_data_size = kwargs.get("validation_data_size", None) + 
self.weight_column_name = kwargs.get("weight_column_name", None) + self.task_type = "Regression" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.training_settings = kwargs.get("training_settings", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): @@ -28785,20 +27205,17 @@ class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdB """ _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, + "model_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
@@ -28809,8 +27226,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric """ super(RegressionModelPerformanceMetricThreshold, self).__init__(**kwargs) - self.model_type = 'Regression' # type: str - self.metric = kwargs['metric'] + self.model_type = "Regression" # type: str + self.metric = kwargs["metric"] class RegressionTrainingSettings(TrainingSettings): @@ -28850,22 +27267,19 @@ class RegressionTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + 
"allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword enable_dnn_training: Enable recommendation of DNN models. :paramtype enable_dnn_training: bool @@ -28900,8 +27314,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.RegressionModels] """ super(RegressionTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) + self.allowed_training_algorithms = kwargs.get("allowed_training_algorithms", None) + self.blocked_training_algorithms = kwargs.get("blocked_training_algorithms", None) class RequestConfiguration(msrest.serialization.Model): @@ -28916,14 +27330,11 @@ class RequestConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per node allowed per deployment. Defaults to 1. 
@@ -28933,8 +27344,8 @@ def __init__( :paramtype request_timeout: ~datetime.timedelta """ super(RequestConfiguration, self).__init__(**kwargs) - self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance', 1) - self.request_timeout = kwargs.get('request_timeout', "PT5S") + self.max_concurrent_requests_per_instance = kwargs.get("max_concurrent_requests_per_instance", 1) + self.request_timeout = kwargs.get("request_timeout", "PT5S") class RequestLogging(msrest.serialization.Model): @@ -28947,13 +27358,10 @@ class RequestLogging(msrest.serialization.Model): """ _attribute_map = { - 'capture_headers': {'key': 'captureHeaders', 'type': '[str]'}, + "capture_headers": {"key": "captureHeaders", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword capture_headers: For payload logging, we only collect payload by default. If customers also want to collect the specified headers, they can set them in captureHeaders so that backend @@ -28961,7 +27369,7 @@ def __init__( :paramtype capture_headers: list[str] """ super(RequestLogging, self).__init__(**kwargs) - self.capture_headers = kwargs.get('capture_headers', None) + self.capture_headers = kwargs.get("capture_headers", None) class RequestMatchPattern(msrest.serialization.Model): @@ -28974,14 +27382,11 @@ class RequestMatchPattern(msrest.serialization.Model): """ _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, - 'method': {'key': 'method', 'type': 'str'}, + "path": {"key": "path", "type": "str"}, + "method": {"key": "method", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword path: :paramtype path: str @@ -28989,8 +27394,8 @@ def __init__( :paramtype method: str """ super(RequestMatchPattern, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.method = kwargs.get('method', None) + self.path = kwargs.get("path", None) + self.method = kwargs.get("method", None) 
class ResizeSchema(msrest.serialization.Model): @@ -29001,19 +27406,16 @@ class ResizeSchema(msrest.serialization.Model): """ _attribute_map = { - 'target_vm_size': {'key': 'targetVMSize', 'type': 'str'}, + "target_vm_size": {"key": "targetVMSize", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword target_vm_size: The name of the virtual machine size. :paramtype target_vm_size: str """ super(ResizeSchema, self).__init__(**kwargs) - self.target_vm_size = kwargs.get('target_vm_size', None) + self.target_vm_size = kwargs.get("target_vm_size", None) class ResourceId(msrest.serialization.Model): @@ -29026,23 +27428,20 @@ class ResourceId(msrest.serialization.Model): """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: Required. The ID of the resource. 
:paramtype id: str """ super(ResourceId, self).__init__(**kwargs) - self.id = kwargs['id'] + self.id = kwargs["id"] class ResourceName(msrest.serialization.Model): @@ -29057,21 +27456,17 @@ class ResourceName(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "value": {"readonly": True}, + "localized_value": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ResourceName, self).__init__(**kwargs) self.value = None self.localized_value = None @@ -29097,29 +27492,25 @@ class ResourceQuota(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "name": {"readonly": True}, + "limit": {"readonly": True}, + "unit": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "name": {"key": "name", "type": "ResourceName"}, + "limit": {"key": "limit", "type": "long"}, + "unit": {"key": "unit", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, 
**kwargs): + """ """ super(ResourceQuota, self).__init__(**kwargs) self.id = None self.aml_workspace_location = None @@ -29158,28 +27549,25 @@ class RollingInputData(MonitoringInputDataBase): """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_offset': {'required': True}, - 'window_size': {'required': True}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_offset": {"required": True}, + "window_size": {"required": True}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_offset': {'key': 'windowOffset', 'type': 'duration'}, - 'window_size': {'key': 'windowSize', 'type': 'duration'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_offset": {"key": "windowOffset", "type": "duration"}, + "window_size": {"key": "windowSize", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword columns: Mapping of column names to special uses. 
:paramtype columns: dict[str, str] @@ -29201,10 +27589,10 @@ def __init__( :paramtype window_size: ~datetime.timedelta """ super(RollingInputData, self).__init__(**kwargs) - self.input_data_type = 'Rolling' # type: str - self.preprocessing_component_id = kwargs.get('preprocessing_component_id', None) - self.window_offset = kwargs['window_offset'] - self.window_size = kwargs['window_size'] + self.input_data_type = "Rolling" # type: str + self.preprocessing_component_id = kwargs.get("preprocessing_component_id", None) + self.window_offset = kwargs["window_offset"] + self.window_size = kwargs["window_size"] class Route(msrest.serialization.Model): @@ -29219,19 +27607,16 @@ class Route(msrest.serialization.Model): """ _validation = { - 'path': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'port': {'required': True}, + "path": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "port": {"required": True}, } _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, + "path": {"key": "path", "type": "str"}, + "port": {"key": "port", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword path: Required. [Required] The path for the route. 
:paramtype path: str @@ -29239,8 +27624,8 @@ def __init__( :paramtype port: int """ super(Route, self).__init__(**kwargs) - self.path = kwargs['path'] - self.port = kwargs['port'] + self.path = kwargs["path"] + self.port = kwargs["port"] class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -29293,28 +27678,25 @@ class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionSharedAccessSignature'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionSharedAccessSignature"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, 
**kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -29349,8 +27731,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature """ super(SASAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'SAS' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "SAS" # type: str + self.credentials = kwargs.get("credentials", None) class SASCredential(DataReferenceCredential): @@ -29368,25 +27750,22 @@ class SASCredential(DataReferenceCredential): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'sas_uri': {'key': 'sasUri', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, + "sas_uri": {"key": "sasUri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. 
:paramtype sas_uri: str """ super(SASCredential, self).__init__(**kwargs) - self.credential_type = 'SAS' # type: str - self.sas_uri = kwargs.get('sas_uri', None) + self.credential_type = "SAS" # type: str + self.sas_uri = kwargs.get("sas_uri", None) class SASCredentialDto(PendingUploadCredentialDto): @@ -29403,25 +27782,22 @@ class SASCredentialDto(PendingUploadCredentialDto): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'sas_uri': {'key': 'sasUri', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, + "sas_uri": {"key": "sasUri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. :paramtype sas_uri: str """ super(SASCredentialDto, self).__init__(**kwargs) - self.credential_type = 'SAS' # type: str - self.sas_uri = kwargs.get('sas_uri', None) + self.credential_type = "SAS" # type: str + self.sas_uri = kwargs.get("sas_uri", None) class SasDatastoreCredentials(DatastoreCredentials): @@ -29438,26 +27814,23 @@ class SasDatastoreCredentials(DatastoreCredentials): """ _validation = { - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "SasDatastoreSecrets"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword secrets: Required. [Required] Storage container secrets. 
:paramtype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets """ super(SasDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Sas' # type: str - self.secrets = kwargs['secrets'] + self.credentials_type = "Sas" # type: str + self.secrets = kwargs["secrets"] class SasDatastoreSecrets(DatastoreSecrets): @@ -29474,25 +27847,22 @@ class SasDatastoreSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "sas_token": {"key": "sasToken", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword sas_token: Storage container SAS token. :paramtype sas_token: str """ super(SasDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Sas' # type: str - self.sas_token = kwargs.get('sas_token', None) + self.secrets_type = "Sas" # type: str + self.sas_token = kwargs.get("sas_token", None) class ScaleSettings(msrest.serialization.Model): @@ -29510,19 +27880,16 @@ class ScaleSettings(msrest.serialization.Model): """ _validation = { - 'max_node_count': {'required': True}, + "max_node_count": {"required": True}, } _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + "max_node_count": {"key": "maxNodeCount", "type": "int"}, + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "node_idle_time_before_scale_down": {"key": "nodeIdleTimeBeforeScaleDown", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_node_count: Required. Max number of nodes to use. 
:paramtype max_node_count: int @@ -29533,9 +27900,9 @@ def __init__( :paramtype node_idle_time_before_scale_down: ~datetime.timedelta """ super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = kwargs['max_node_count'] - self.min_node_count = kwargs.get('min_node_count', 0) - self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) + self.max_node_count = kwargs["max_node_count"] + self.min_node_count = kwargs.get("min_node_count", 0) + self.node_idle_time_before_scale_down = kwargs.get("node_idle_time_before_scale_down", None) class ScaleSettingsInformation(msrest.serialization.Model): @@ -29546,19 +27913,16 @@ class ScaleSettingsInformation(msrest.serialization.Model): """ _attribute_map = { - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword scale_settings: scale settings for AML Compute. 
:paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings """ super(ScaleSettingsInformation, self).__init__(**kwargs) - self.scale_settings = kwargs.get('scale_settings', None) + self.scale_settings = kwargs.get("scale_settings", None) class Schedule(ProxyResource): @@ -29584,31 +27948,28 @@ class Schedule(ProxyResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ScheduleProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ScheduleProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. [Required] Additional attributes of the entity. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties """ super(Schedule, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class ScheduleBase(msrest.serialization.Model): @@ -29626,15 +27987,12 @@ class ScheduleBase(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: A system assigned id for the schedule. :paramtype id: str @@ -29647,9 +28005,9 @@ def __init__( :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus """ super(ScheduleBase, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.provisioning_status = kwargs.get('provisioning_status', None) - self.status = kwargs.get('status', None) + self.id = kwargs.get("id", None) + self.provisioning_status = kwargs.get("provisioning_status", None) + self.status = kwargs.get("status", None) class ScheduleProperties(ResourceBase): @@ -29680,26 +28038,23 @@ class ScheduleProperties(ResourceBase): """ _validation = { - 'action': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'trigger': {'required': True}, + "action": {"required": True}, + "provisioning_state": {"readonly": True}, + "trigger": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'action': {'key': 'action', 'type': 'ScheduleActionBase'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, - 'provisioning_state': 
{'key': 'provisioningState', 'type': 'str'}, - 'trigger': {'key': 'trigger', 'type': 'TriggerBase'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "action": {"key": "action", "type": "ScheduleActionBase"}, + "display_name": {"key": "displayName", "type": "str"}, + "is_enabled": {"key": "isEnabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "trigger": {"key": "trigger", "type": "TriggerBase"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -29717,11 +28072,11 @@ def __init__( :paramtype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase """ super(ScheduleProperties, self).__init__(**kwargs) - self.action = kwargs['action'] - self.display_name = kwargs.get('display_name', None) - self.is_enabled = kwargs.get('is_enabled', True) + self.action = kwargs["action"] + self.display_name = kwargs.get("display_name", None) + self.is_enabled = kwargs.get("is_enabled", True) self.provisioning_state = None - self.trigger = kwargs['trigger'] + self.trigger = kwargs["trigger"] class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): @@ -29735,14 +28090,11 @@ class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Schedule]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Schedule]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of Schedule objects. If null, there are no additional pages. 
@@ -29751,8 +28103,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.Schedule] """ super(ScheduleResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ScriptReference(msrest.serialization.Model): @@ -29769,16 +28121,13 @@ class ScriptReference(msrest.serialization.Model): """ _attribute_map = { - 'script_source': {'key': 'scriptSource', 'type': 'str'}, - 'script_data': {'key': 'scriptData', 'type': 'str'}, - 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'str'}, + "script_source": {"key": "scriptSource", "type": "str"}, + "script_data": {"key": "scriptData", "type": "str"}, + "script_arguments": {"key": "scriptArguments", "type": "str"}, + "timeout": {"key": "timeout", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword script_source: The storage source of the script: inline, workspace. 
:paramtype script_source: str @@ -29790,10 +28139,10 @@ def __init__( :paramtype timeout: str """ super(ScriptReference, self).__init__(**kwargs) - self.script_source = kwargs.get('script_source', None) - self.script_data = kwargs.get('script_data', None) - self.script_arguments = kwargs.get('script_arguments', None) - self.timeout = kwargs.get('timeout', None) + self.script_source = kwargs.get("script_source", None) + self.script_data = kwargs.get("script_data", None) + self.script_arguments = kwargs.get("script_arguments", None) + self.timeout = kwargs.get("timeout", None) class ScriptsToExecute(msrest.serialization.Model): @@ -29806,14 +28155,11 @@ class ScriptsToExecute(msrest.serialization.Model): """ _attribute_map = { - 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, - 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, + "startup_script": {"key": "startupScript", "type": "ScriptReference"}, + "creation_script": {"key": "creationScript", "type": "ScriptReference"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword startup_script: Script that's run every time the machine starts. 
:paramtype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference @@ -29821,8 +28167,8 @@ def __init__( :paramtype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference """ super(ScriptsToExecute, self).__init__(**kwargs) - self.startup_script = kwargs.get('startup_script', None) - self.creation_script = kwargs.get('creation_script', None) + self.startup_script = kwargs.get("startup_script", None) + self.creation_script = kwargs.get("creation_script", None) class SecretConfiguration(msrest.serialization.Model): @@ -29836,14 +28182,11 @@ class SecretConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, - 'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'}, + "uri": {"key": "uri", "type": "str"}, + "workspace_secret_name": {"key": "workspaceSecretName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword uri: Secret Uri. Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. 
@@ -29852,8 +28195,8 @@ def __init__( :paramtype workspace_secret_name: str """ super(SecretConfiguration, self).__init__(**kwargs) - self.uri = kwargs.get('uri', None) - self.workspace_secret_name = kwargs.get('workspace_secret_name', None) + self.uri = kwargs.get("uri", None) + self.workspace_secret_name = kwargs.get("workspace_secret_name", None) class ServerlessComputeSettings(msrest.serialization.Model): @@ -29868,14 +28211,11 @@ class ServerlessComputeSettings(msrest.serialization.Model): """ _attribute_map = { - 'serverless_compute_custom_subnet': {'key': 'serverlessComputeCustomSubnet', 'type': 'str'}, - 'serverless_compute_no_public_ip': {'key': 'serverlessComputeNoPublicIP', 'type': 'bool'}, + "serverless_compute_custom_subnet": {"key": "serverlessComputeCustomSubnet", "type": "str"}, + "serverless_compute_no_public_ip": {"key": "serverlessComputeNoPublicIP", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword serverless_compute_custom_subnet: The resource ID of an existing virtual network subnet in which serverless compute nodes should be deployed. 
@@ -29886,8 +28226,8 @@ def __init__( :paramtype serverless_compute_no_public_ip: bool """ super(ServerlessComputeSettings, self).__init__(**kwargs) - self.serverless_compute_custom_subnet = kwargs.get('serverless_compute_custom_subnet', None) - self.serverless_compute_no_public_ip = kwargs.get('serverless_compute_no_public_ip', None) + self.serverless_compute_custom_subnet = kwargs.get("serverless_compute_custom_subnet", None) + self.serverless_compute_no_public_ip = kwargs.get("serverless_compute_no_public_ip", None) class ServerlessEndpoint(TrackedResource): @@ -29924,31 +28264,28 @@ class ServerlessEndpoint(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ServerlessEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": 
"properties", "type": "ServerlessEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword tags: A set of tags. Resource tags. :paramtype tags: dict[str, str] @@ -29965,10 +28302,10 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ super(ServerlessEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.properties = kwargs["properties"] + self.sku = kwargs.get("sku", None) class ServerlessEndpointCapacityReservation(msrest.serialization.Model): @@ -29985,18 +28322,15 @@ class ServerlessEndpointCapacityReservation(msrest.serialization.Model): """ _validation = { - 'capacity_reservation_group_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "capacity_reservation_group_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'capacity_reservation_group_id': {'key': 'capacityReservationGroupId', 'type': 'str'}, - 'endpoint_reserved_capacity': {'key': 'endpointReservedCapacity', 'type': 'int'}, + "capacity_reservation_group_id": {"key": "capacityReservationGroupId", "type": "str"}, + "endpoint_reserved_capacity": {"key": "endpointReservedCapacity", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword capacity_reservation_group_id: Required. [Required] Specifies a capacity reservation group ID to allocate capacity from. 
@@ -30006,8 +28340,8 @@ def __init__( :paramtype endpoint_reserved_capacity: int """ super(ServerlessEndpointCapacityReservation, self).__init__(**kwargs) - self.capacity_reservation_group_id = kwargs['capacity_reservation_group_id'] - self.endpoint_reserved_capacity = kwargs.get('endpoint_reserved_capacity', None) + self.capacity_reservation_group_id = kwargs["capacity_reservation_group_id"] + self.endpoint_reserved_capacity = kwargs.get("endpoint_reserved_capacity", None) class ServerlessEndpointProperties(msrest.serialization.Model): @@ -30048,27 +28382,24 @@ class ServerlessEndpointProperties(msrest.serialization.Model): """ _validation = { - 'inference_endpoint': {'readonly': True}, - 'marketplace_subscription_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'endpoint_state': {'readonly': True}, + "inference_endpoint": {"readonly": True}, + "marketplace_subscription_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "endpoint_state": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'capacity_reservation': {'key': 'capacityReservation', 'type': 'ServerlessEndpointCapacityReservation'}, - 'inference_endpoint': {'key': 'inferenceEndpoint', 'type': 'ServerlessInferenceEndpoint'}, - 'marketplace_subscription_id': {'key': 'marketplaceSubscriptionId', 'type': 'str'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ModelSettings'}, - 'offer': {'key': 'offer', 'type': 'ServerlessOffer'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'endpoint_state': {'key': 'endpointState', 'type': 'str'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "capacity_reservation": {"key": "capacityReservation", "type": "ServerlessEndpointCapacityReservation"}, + "inference_endpoint": {"key": "inferenceEndpoint", "type": "ServerlessInferenceEndpoint"}, + "marketplace_subscription_id": {"key": "marketplaceSubscriptionId", "type": "str"}, + "model_settings": {"key": 
"modelSettings", "type": "ModelSettings"}, + "offer": {"key": "offer", "type": "ServerlessOffer"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "endpoint_state": {"key": "endpointState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword auth_mode: Specifies the authentication mode for the Serverless endpoint. Possible values include: "Key", "AAD". @@ -30086,12 +28417,12 @@ def __init__( :paramtype offer: ~azure.mgmt.machinelearningservices.models.ServerlessOffer """ super(ServerlessEndpointProperties, self).__init__(**kwargs) - self.auth_mode = kwargs.get('auth_mode', None) - self.capacity_reservation = kwargs.get('capacity_reservation', None) + self.auth_mode = kwargs.get("auth_mode", None) + self.capacity_reservation = kwargs.get("capacity_reservation", None) self.inference_endpoint = None self.marketplace_subscription_id = None - self.model_settings = kwargs.get('model_settings', None) - self.offer = kwargs.get('offer', None) + self.model_settings = kwargs.get("model_settings", None) + self.offer = kwargs.get("offer", None) self.provisioning_state = None self.endpoint_state = None @@ -30106,19 +28437,15 @@ class ServerlessEndpointStatus(msrest.serialization.Model): """ _validation = { - 'metrics': {'readonly': True}, + "metrics": {"readonly": True}, } _attribute_map = { - 'metrics': {'key': 'metrics', 'type': '{str}'}, + "metrics": {"key": "metrics", "type": "{str}"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(ServerlessEndpointStatus, self).__init__(**kwargs) self.metrics = None @@ -30134,14 +28461,11 @@ class ServerlessEndpointTrackedResourceArmPaginatedResult(msrest.serialization.M """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ServerlessEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ServerlessEndpoint]"}, 
} - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of ServerlessEndpoint objects. If null, there are no additional pages. @@ -30150,8 +28474,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] """ super(ServerlessEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class ServerlessInferenceEndpoint(msrest.serialization.Model): @@ -30169,19 +28493,16 @@ class ServerlessInferenceEndpoint(msrest.serialization.Model): """ _validation = { - 'headers': {'readonly': True}, - 'uri': {'required': True}, + "headers": {"readonly": True}, + "uri": {"required": True}, } _attribute_map = { - 'headers': {'key': 'headers', 'type': '{str}'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "headers": {"key": "headers", "type": "{str}"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword uri: Required. [Required] The inference uri to target when making requests against the Serverless Endpoint. 
@@ -30189,7 +28510,7 @@ def __init__( """ super(ServerlessInferenceEndpoint, self).__init__(**kwargs) self.headers = None - self.uri = kwargs['uri'] + self.uri = kwargs["uri"] class ServerlessOffer(msrest.serialization.Model): @@ -30204,19 +28525,16 @@ class ServerlessOffer(msrest.serialization.Model): """ _validation = { - 'offer_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'publisher': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "offer_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "publisher": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'offer_name': {'key': 'offerName', 'type': 'str'}, - 'publisher': {'key': 'publisher', 'type': 'str'}, + "offer_name": {"key": "offerName", "type": "str"}, + "publisher": {"key": "publisher", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword offer_name: Required. [Required] The name of the Serverless Offer. 
:paramtype offer_name: str @@ -30224,8 +28542,8 @@ def __init__( :paramtype publisher: str """ super(ServerlessOffer, self).__init__(**kwargs) - self.offer_name = kwargs['offer_name'] - self.publisher = kwargs['publisher'] + self.offer_name = kwargs["offer_name"] + self.publisher = kwargs["publisher"] class ServiceManagedResourcesSettings(msrest.serialization.Model): @@ -30236,19 +28554,16 @@ class ServiceManagedResourcesSettings(msrest.serialization.Model): """ _attribute_map = { - 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, + "cosmos_db": {"key": "cosmosDb", "type": "CosmosDbSettings"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword cosmos_db: :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings """ super(ServiceManagedResourcesSettings, self).__init__(**kwargs) - self.cosmos_db = kwargs.get('cosmos_db', None) + self.cosmos_db = kwargs.get("cosmos_db", None) class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -30301,28 +28616,25 @@ class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 
'type': 'WorkspaceConnectionServicePrincipal'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionServicePrincipal"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -30357,8 +28669,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal """ super(ServicePrincipalAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ServicePrincipal' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "ServicePrincipal" # type: str + self.credentials = kwargs.get("credentials", None) class ServicePrincipalDatastoreCredentials(DatastoreCredentials): @@ -30383,25 +28695,22 @@ class ServicePrincipalDatastoreCredentials(DatastoreCredentials): """ _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': 
{'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "ServicePrincipalDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword authority_url: Authority URL used for authentication. :paramtype authority_url: str @@ -30416,12 +28725,12 @@ def __init__( :paramtype tenant_id: str """ super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'ServicePrincipal' # type: str - self.authority_url = kwargs.get('authority_url', None) - self.client_id = kwargs['client_id'] - self.resource_url = kwargs.get('resource_url', None) - self.secrets = kwargs['secrets'] - self.tenant_id = kwargs['tenant_id'] + self.credentials_type = "ServicePrincipal" # type: str + self.authority_url = kwargs.get("authority_url", None) + self.client_id = kwargs["client_id"] + self.resource_url = kwargs.get("resource_url", None) + self.secrets = kwargs["secrets"] + self.tenant_id = kwargs["tenant_id"] class ServicePrincipalDatastoreSecrets(DatastoreSecrets): @@ -30438,25 +28747,22 @@ class ServicePrincipalDatastoreSecrets(DatastoreSecrets): """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword client_secret: 
Service principal secret. :paramtype client_secret: str """ super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'ServicePrincipal' # type: str - self.client_secret = kwargs.get('client_secret', None) + self.secrets_type = "ServicePrincipal" # type: str + self.client_secret = kwargs.get("client_secret", None) class ServiceTagDestination(msrest.serialization.Model): @@ -30477,21 +28783,18 @@ class ServiceTagDestination(msrest.serialization.Model): """ _validation = { - 'address_prefixes': {'readonly': True}, + "address_prefixes": {"readonly": True}, } _attribute_map = { - 'action': {'key': 'action', 'type': 'str'}, - 'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'}, - 'port_ranges': {'key': 'portRanges', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_tag': {'key': 'serviceTag', 'type': 'str'}, + "action": {"key": "action", "type": "str"}, + "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, + "port_ranges": {"key": "portRanges", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_tag": {"key": "serviceTag", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword action: The action enum for networking rule. Possible values include: "Allow", "Deny". 
:paramtype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction @@ -30503,11 +28806,11 @@ def __init__( :paramtype service_tag: str """ super(ServiceTagDestination, self).__init__(**kwargs) - self.action = kwargs.get('action', None) + self.action = kwargs.get("action", None) self.address_prefixes = None - self.port_ranges = kwargs.get('port_ranges', None) - self.protocol = kwargs.get('protocol', None) - self.service_tag = kwargs.get('service_tag', None) + self.port_ranges = kwargs.get("port_ranges", None) + self.protocol = kwargs.get("protocol", None) + self.service_tag = kwargs.get("service_tag", None) class ServiceTagOutboundRule(OutboundRule): @@ -30531,20 +28834,17 @@ class ServiceTagOutboundRule(OutboundRule): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'ServiceTagDestination'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "ServiceTagDestination"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. Possible values include: "Required", "Recommended", "UserDefined". 
@@ -30557,8 +28857,8 @@ def __init__( :paramtype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination """ super(ServiceTagOutboundRule, self).__init__(**kwargs) - self.type = 'ServiceTag' # type: str - self.destination = kwargs.get('destination', None) + self.type = "ServiceTag" # type: str + self.destination = kwargs.get("destination", None) class SetupScripts(msrest.serialization.Model): @@ -30569,19 +28869,16 @@ class SetupScripts(msrest.serialization.Model): """ _attribute_map = { - 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'}, + "scripts": {"key": "scripts", "type": "ScriptsToExecute"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword scripts: Customized setup scripts. :paramtype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute """ super(SetupScripts, self).__init__(**kwargs) - self.scripts = kwargs.get('scripts', None) + self.scripts = kwargs.get("scripts", None) class SharedPrivateLinkResource(msrest.serialization.Model): @@ -30602,17 +28899,14 @@ class SharedPrivateLinkResource(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "private_link_resource_id": {"key": "properties.privateLinkResourceId", "type": "str"}, + "request_message": {"key": "properties.requestMessage", "type": "str"}, + "status": {"key": "properties.status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: Unique name of the private link. 
:paramtype name: str @@ -30628,11 +28922,11 @@ def __init__( ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.group_id = kwargs.get('group_id', None) - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.request_message = kwargs.get('request_message', None) - self.status = kwargs.get('status', None) + self.name = kwargs.get("name", None) + self.group_id = kwargs.get("group_id", None) + self.private_link_resource_id = kwargs.get("private_link_resource_id", None) + self.request_message = kwargs.get("request_message", None) + self.status = kwargs.get("status", None) class Sku(msrest.serialization.Model): @@ -30658,21 +28952,18 @@ class Sku(msrest.serialization.Model): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'capacity': {'key': 'capacity', 'type': 'int'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "family": {"key": "family", "type": "str"}, + "capacity": {"key": "capacity", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. 
:paramtype name: str @@ -30691,11 +28982,11 @@ def __init__( :paramtype capacity: int """ super(Sku, self).__init__(**kwargs) - self.name = kwargs['name'] - self.tier = kwargs.get('tier', None) - self.size = kwargs.get('size', None) - self.family = kwargs.get('family', None) - self.capacity = kwargs.get('capacity', None) + self.name = kwargs["name"] + self.tier = kwargs.get("tier", None) + self.size = kwargs.get("size", None) + self.family = kwargs.get("family", None) + self.capacity = kwargs.get("capacity", None) class SkuCapacity(msrest.serialization.Model): @@ -30713,16 +29004,13 @@ class SkuCapacity(msrest.serialization.Model): """ _attribute_map = { - 'default': {'key': 'default', 'type': 'int'}, - 'maximum': {'key': 'maximum', 'type': 'int'}, - 'minimum': {'key': 'minimum', 'type': 'int'}, - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "default": {"key": "default", "type": "int"}, + "maximum": {"key": "maximum", "type": "int"}, + "minimum": {"key": "minimum", "type": "int"}, + "scale_type": {"key": "scaleType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword default: Gets or sets the default capacity. 
:paramtype default: int @@ -30735,10 +29023,10 @@ def __init__( :paramtype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType """ super(SkuCapacity, self).__init__(**kwargs) - self.default = kwargs.get('default', 0) - self.maximum = kwargs.get('maximum', 0) - self.minimum = kwargs.get('minimum', 0) - self.scale_type = kwargs.get('scale_type', None) + self.default = kwargs.get("default", 0) + self.maximum = kwargs.get("maximum", 0) + self.minimum = kwargs.get("minimum", 0) + self.scale_type = kwargs.get("scale_type", None) class SkuResource(msrest.serialization.Model): @@ -30755,19 +29043,16 @@ class SkuResource(msrest.serialization.Model): """ _validation = { - 'resource_type': {'readonly': True}, + "resource_type": {"readonly": True}, } _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'SkuCapacity'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'SkuSetting'}, + "capacity": {"key": "capacity", "type": "SkuCapacity"}, + "resource_type": {"key": "resourceType", "type": "str"}, + "sku": {"key": "sku", "type": "SkuSetting"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword capacity: Gets or sets the Sku Capacity. 
:paramtype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity @@ -30775,9 +29060,9 @@ def __init__( :paramtype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting """ super(SkuResource, self).__init__(**kwargs) - self.capacity = kwargs.get('capacity', None) + self.capacity = kwargs.get("capacity", None) self.resource_type = None - self.sku = kwargs.get('sku', None) + self.sku = kwargs.get("sku", None) class SkuResourceArmPaginatedResult(msrest.serialization.Model): @@ -30791,14 +29076,11 @@ class SkuResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[SkuResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[SkuResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page of SkuResource objects. If null, there are no additional pages. @@ -30807,8 +29089,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] """ super(SkuResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class SkuSetting(msrest.serialization.Model): @@ -30826,18 +29108,15 @@ class SkuSetting(msrest.serialization.Model): """ _validation = { - 'name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword name: Required. [Required] The name of the SKU. Ex - P3. 
It is typically a letter+number code. @@ -30848,8 +29127,8 @@ def __init__( :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ super(SkuSetting, self).__init__(**kwargs) - self.name = kwargs['name'] - self.tier = kwargs.get('tier', None) + self.name = kwargs["name"] + self.tier = kwargs.get("tier", None) class SparkJob(JobBaseProperties): @@ -30927,47 +29206,44 @@ class SparkJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'code_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'entry': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'args': {'key': 'args', 'type': 'str'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'conf': {'key': 'conf', 'type': '{str}'}, - 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jars': {'key': 'jars', 
'type': '[str]'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'py_files': {'key': 'pyFiles', 'type': '[str]'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): + "job_type": {"required": True}, + "status": {"readonly": True}, + "code_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "entry": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "archives": {"key": "archives", "type": "[str]"}, + "args": {"key": "args", "type": "str"}, + "code_id": {"key": "codeId", "type": "str"}, + "conf": {"key": "conf", "type": "{str}"}, + "entry": {"key": "entry", "type": "SparkJobEntry"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "files": {"key": "files", "type": "[str]"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jars": {"key": "jars", "type": "[str]"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "py_files": {"key": "pyFiles", "type": "[str]"}, + 
"queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "SparkResourceConfiguration"}, + } + + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -31028,21 +29304,21 @@ def __init__( :paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration """ super(SparkJob, self).__init__(**kwargs) - self.job_type = 'Spark' # type: str - self.archives = kwargs.get('archives', None) - self.args = kwargs.get('args', None) - self.code_id = kwargs['code_id'] - self.conf = kwargs.get('conf', None) - self.entry = kwargs['entry'] - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.files = kwargs.get('files', None) - self.inputs = kwargs.get('inputs', None) - self.jars = kwargs.get('jars', None) - self.outputs = kwargs.get('outputs', None) - self.py_files = kwargs.get('py_files', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) + self.job_type = "Spark" # type: str + self.archives = kwargs.get("archives", None) + self.args = kwargs.get("args", None) + self.code_id = kwargs["code_id"] + self.conf = kwargs.get("conf", None) + self.entry = kwargs["entry"] + self.environment_id = kwargs.get("environment_id", None) + self.environment_variables = kwargs.get("environment_variables", None) + self.files = kwargs.get("files", None) + self.inputs = kwargs.get("inputs", None) + self.jars = kwargs.get("jars", None) + self.outputs = kwargs.get("outputs", None) + self.py_files = kwargs.get("py_files", None) + self.queue_settings = kwargs.get("queue_settings", None) + self.resources = kwargs.get("resources", None) class SparkJobEntry(msrest.serialization.Model): @@ -31060,24 +29336,22 @@ class SparkJobEntry(msrest.serialization.Model): """ _validation = { - 'spark_job_entry_type': {'required': True}, 
+ "spark_job_entry_type": {"required": True}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, } _subtype_map = { - 'spark_job_entry_type': {'SparkJobPythonEntry': 'SparkJobPythonEntry', - 'SparkJobScalaEntry': 'SparkJobScalaEntry'} + "spark_job_entry_type": { + "SparkJobPythonEntry": "SparkJobPythonEntry", + "SparkJobScalaEntry": "SparkJobScalaEntry", + } } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(SparkJobEntry, self).__init__(**kwargs) self.spark_job_entry_type = None # type: Optional[str] @@ -31096,26 +29370,23 @@ class SparkJobPythonEntry(SparkJobEntry): """ _validation = { - 'spark_job_entry_type': {'required': True}, - 'file': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "spark_job_entry_type": {"required": True}, + "file": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "file": {"key": "file", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword file: Required. [Required] Relative python file path for job entry point. 
:paramtype file: str """ super(SparkJobPythonEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobPythonEntry' # type: str - self.file = kwargs['file'] + self.spark_job_entry_type = "SparkJobPythonEntry" # type: str + self.file = kwargs["file"] class SparkJobScalaEntry(SparkJobEntry): @@ -31132,26 +29403,23 @@ class SparkJobScalaEntry(SparkJobEntry): """ _validation = { - 'spark_job_entry_type': {'required': True}, - 'class_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "spark_job_entry_type": {"required": True}, + "class_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "class_name": {"key": "className", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword class_name: Required. [Required] Scala class name used as entry point. :paramtype class_name: str """ super(SparkJobScalaEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobScalaEntry' # type: str - self.class_name = kwargs['class_name'] + self.spark_job_entry_type = "SparkJobScalaEntry" # type: str + self.class_name = kwargs["class_name"] class SparkResourceConfiguration(msrest.serialization.Model): @@ -31164,14 +29432,11 @@ class SparkResourceConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword instance_type: Optional type of VM used as supported by the compute target. 
:paramtype instance_type: str @@ -31179,12 +29444,13 @@ def __init__( :paramtype runtime_version: str """ super(SparkResourceConfiguration, self).__init__(**kwargs) - self.instance_type = kwargs.get('instance_type', None) - self.runtime_version = kwargs.get('runtime_version', "3.1") + self.instance_type = kwargs.get("instance_type", None) + self.runtime_version = kwargs.get("runtime_version", "3.1") -class SpeechEndpointDeploymentResourceProperties(EndpointDeploymentResourceProperties, - CognitiveServiceEndpointDeploymentResourceProperties): +class SpeechEndpointDeploymentResourceProperties( + EndpointDeploymentResourceProperties, CognitiveServiceEndpointDeploymentResourceProperties +): """SpeechEndpointDeploymentResourceProperties. Variables are only populated by the server, and will be ignored when sending a request. @@ -31212,25 +29478,22 @@ class SpeechEndpointDeploymentResourceProperties(EndpointDeploymentResourcePrope """ _validation = { - 'model': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9._]'}, + "model": {"required": True}, + "provisioning_state": {"readonly": True}, + "type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9._]"}, } _attribute_map = { - 'model': {'key': 'model', 'type': 'EndpointDeploymentModel'}, - 'rai_policy_name': {'key': 'raiPolicyName', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'CognitiveServicesSku'}, - 'version_upgrade_option': {'key': 'versionUpgradeOption', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "model": {"key": "model", "type": "EndpointDeploymentModel"}, + "rai_policy_name": {"key": "raiPolicyName", "type": "str"}, + "sku": {"key": "sku", "type": "CognitiveServicesSku"}, + "version_upgrade_option": {"key": "versionUpgradeOption", "type": "str"}, + "failure_reason": {"key": 
"failureReason", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword model: Required. Model used for the endpoint deployment. :paramtype model: ~azure.mgmt.machinelearningservices.models.EndpointDeploymentModel @@ -31246,12 +29509,12 @@ def __init__( :paramtype failure_reason: str """ super(SpeechEndpointDeploymentResourceProperties, self).__init__(**kwargs) - self.model = kwargs['model'] - self.rai_policy_name = kwargs.get('rai_policy_name', None) - self.sku = kwargs.get('sku', None) - self.version_upgrade_option = kwargs.get('version_upgrade_option', None) - self.type = 'Azure.Speech' # type: str - self.failure_reason = kwargs.get('failure_reason', None) + self.model = kwargs["model"] + self.rai_policy_name = kwargs.get("rai_policy_name", None) + self.sku = kwargs.get("sku", None) + self.version_upgrade_option = kwargs.get("version_upgrade_option", None) + self.type = "Azure.Speech" # type: str + self.failure_reason = kwargs.get("failure_reason", None) self.provisioning_state = None @@ -31282,23 +29545,20 @@ class SpeechEndpointResourceProperties(EndpointResourceProperties): """ _validation = { - 'endpoint_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'associated_resource_id': {'key': 'associatedResourceId', 'type': 'str'}, - 'endpoint_type': {'key': 'endpointType', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - 'failure_reason': {'key': 'failureReason', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "associated_resource_id": {"key": "associatedResourceId", "type": "str"}, + "endpoint_type": {"key": "endpointType", "type": "str"}, + "endpoint_uri": {"key": 
"endpointUri", "type": "str"}, + "failure_reason": {"key": "failureReason", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword associated_resource_id: Byo resource id for creating the built-in model service endpoints. @@ -31311,7 +29571,7 @@ def __init__( :paramtype name: str """ super(SpeechEndpointResourceProperties, self).__init__(**kwargs) - self.endpoint_type = 'Azure.Speech' # type: str + self.endpoint_type = "Azure.Speech" # type: str class SslConfiguration(msrest.serialization.Model): @@ -31333,18 +29593,15 @@ class SslConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, - 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'}, - 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'}, + "status": {"key": "status", "type": "str"}, + "cert": {"key": "cert", "type": "str"}, + "key": {"key": "key", "type": "str"}, + "cname": {"key": "cname", "type": "str"}, + "leaf_domain_label": {"key": "leafDomainLabel", "type": "str"}, + "overwrite_existing_domain": {"key": "overwriteExistingDomain", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword status: Enable or disable ssl for scoring. Possible values include: "Disabled", "Enabled", "Auto". 
@@ -31361,12 +29618,12 @@ def __init__( :paramtype overwrite_existing_domain: bool """ super(SslConfiguration, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.cert = kwargs.get('cert', None) - self.key = kwargs.get('key', None) - self.cname = kwargs.get('cname', None) - self.leaf_domain_label = kwargs.get('leaf_domain_label', None) - self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain', None) + self.status = kwargs.get("status", None) + self.cert = kwargs.get("cert", None) + self.key = kwargs.get("key", None) + self.cname = kwargs.get("cname", None) + self.leaf_domain_label = kwargs.get("leaf_domain_label", None) + self.overwrite_existing_domain = kwargs.get("overwrite_existing_domain", None) class StackEnsembleSettings(msrest.serialization.Model): @@ -31388,15 +29645,12 @@ class StackEnsembleSettings(msrest.serialization.Model): """ _attribute_map = { - 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, - 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, - 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, + "stack_meta_learner_k_wargs": {"key": "stackMetaLearnerKWargs", "type": "object"}, + "stack_meta_learner_train_percentage": {"key": "stackMetaLearnerTrainPercentage", "type": "float"}, + "stack_meta_learner_type": {"key": "stackMetaLearnerType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the meta-learner. 
@@ -31413,9 +29667,9 @@ def __init__( ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType """ super(StackEnsembleSettings, self).__init__(**kwargs) - self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None) - self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', 0.2) - self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None) + self.stack_meta_learner_k_wargs = kwargs.get("stack_meta_learner_k_wargs", None) + self.stack_meta_learner_train_percentage = kwargs.get("stack_meta_learner_train_percentage", 0.2) + self.stack_meta_learner_type = kwargs.get("stack_meta_learner_type", None) class StaticInputData(MonitoringInputDataBase): @@ -31446,28 +29700,25 @@ class StaticInputData(MonitoringInputDataBase): """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_end': {'required': True}, - 'window_start': {'required': True}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_end": {"required": True}, + "window_start": {"required": True}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_end': {'key': 'windowEnd', 'type': 'iso-8601'}, - 'window_start': {'key': 'windowStart', 'type': 'iso-8601'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": 
"str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_end": {"key": "windowEnd", "type": "iso-8601"}, + "window_start": {"key": "windowStart", "type": "iso-8601"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] @@ -31488,10 +29739,10 @@ def __init__( :paramtype window_start: ~datetime.datetime """ super(StaticInputData, self).__init__(**kwargs) - self.input_data_type = 'Static' # type: str - self.preprocessing_component_id = kwargs.get('preprocessing_component_id', None) - self.window_end = kwargs['window_end'] - self.window_start = kwargs['window_start'] + self.input_data_type = "Static" # type: str + self.preprocessing_component_id = kwargs.get("preprocessing_component_id", None) + self.window_end = kwargs["window_end"] + self.window_start = kwargs["window_start"] class StatusMessage(msrest.serialization.Model): @@ -31511,25 +29762,21 @@ class StatusMessage(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, + "code": {"readonly": True}, + "created_date_time": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(StatusMessage, self).__init__(**kwargs) self.code = None 
self.created_date_time = None @@ -31551,14 +29798,11 @@ class StorageAccountDetails(msrest.serialization.Model): """ _attribute_map = { - 'system_created_storage_account': {'key': 'systemCreatedStorageAccount', 'type': 'SystemCreatedStorageAccount'}, - 'user_created_storage_account': {'key': 'userCreatedStorageAccount', 'type': 'UserCreatedStorageAccount'}, + "system_created_storage_account": {"key": "systemCreatedStorageAccount", "type": "SystemCreatedStorageAccount"}, + "user_created_storage_account": {"key": "userCreatedStorageAccount", "type": "UserCreatedStorageAccount"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword system_created_storage_account: Details of system created storage account to be used for the registry. @@ -31570,8 +29814,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount """ super(StorageAccountDetails, self).__init__(**kwargs) - self.system_created_storage_account = kwargs.get('system_created_storage_account', None) - self.user_created_storage_account = kwargs.get('user_created_storage_account', None) + self.system_created_storage_account = kwargs.get("system_created_storage_account", None) + self.user_created_storage_account = kwargs.get("user_created_storage_account", None) class SweepJob(JobBaseProperties): @@ -31646,46 +29890,43 @@ class SweepJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'objective': {'required': True}, - 'sampling_algorithm': {'required': True}, - 'search_space': {'required': True}, - 'trial': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': 
{'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'component_configuration': {'key': 'componentConfiguration', 'type': 'ComponentConfiguration'}, - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'SweepJobLimits'}, - 'objective': {'key': 'objective', 'type': 'Objective'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'SamplingAlgorithm'}, - 'search_space': {'key': 'searchSpace', 'type': 'object'}, - 'trial': {'key': 'trial', 'type': 'TrialComponent'}, - } - - def __init__( - self, - **kwargs - ): + "job_type": {"required": True}, + "status": {"readonly": True}, + "objective": {"required": True}, + "sampling_algorithm": {"required": True}, + "search_space": {"required": True}, + "trial": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + 
"is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "component_configuration": {"key": "componentConfiguration", "type": "ComponentConfiguration"}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "limits": {"key": "limits", "type": "SweepJobLimits"}, + "objective": {"key": "objective", "type": "Objective"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "SamplingAlgorithm"}, + "search_space": {"key": "searchSpace", "type": "object"}, + "trial": {"key": "trial", "type": "TrialComponent"}, + } + + def __init__(self, **kwargs): """ :keyword description: The asset description text. 
:paramtype description: str @@ -31743,18 +29984,18 @@ def __init__( :paramtype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent """ super(SweepJob, self).__init__(**kwargs) - self.job_type = 'Sweep' # type: str - self.component_configuration = kwargs.get('component_configuration', None) - self.early_termination = kwargs.get('early_termination', None) - self.inputs = kwargs.get('inputs', None) - self.limits = kwargs.get('limits', None) - self.objective = kwargs['objective'] - self.outputs = kwargs.get('outputs', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] - self.search_space = kwargs['search_space'] - self.trial = kwargs['trial'] + self.job_type = "Sweep" # type: str + self.component_configuration = kwargs.get("component_configuration", None) + self.early_termination = kwargs.get("early_termination", None) + self.inputs = kwargs.get("inputs", None) + self.limits = kwargs.get("limits", None) + self.objective = kwargs["objective"] + self.outputs = kwargs.get("outputs", None) + self.queue_settings = kwargs.get("queue_settings", None) + self.resources = kwargs.get("resources", None) + self.sampling_algorithm = kwargs["sampling_algorithm"] + self.search_space = kwargs["search_space"] + self.trial = kwargs["trial"] class SweepJobLimits(JobLimits): @@ -31777,21 +30018,18 @@ class SweepJobLimits(JobLimits): """ _validation = { - 'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", 
"type": "duration"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_total_trials": {"key": "maxTotalTrials", "type": "int"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. @@ -31804,10 +30042,10 @@ def __init__( :paramtype trial_timeout: ~datetime.timedelta """ super(SweepJobLimits, self).__init__(**kwargs) - self.job_limits_type = 'Sweep' # type: str - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None) - self.max_total_trials = kwargs.get('max_total_trials', None) - self.trial_timeout = kwargs.get('trial_timeout', None) + self.job_limits_type = "Sweep" # type: str + self.max_concurrent_trials = kwargs.get("max_concurrent_trials", None) + self.max_total_trials = kwargs.get("max_total_trials", None) + self.trial_timeout = kwargs.get("trial_timeout", None) class SynapseSpark(Compute): @@ -31849,32 +30087,29 @@ class SynapseSpark(Compute): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "properties": {"key": "properties", "type": "SynapseSparkProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword compute_location: Location for the underlying compute. 
:paramtype compute_location: str @@ -31889,8 +30124,8 @@ def __init__( :paramtype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties """ super(SynapseSpark, self).__init__(**kwargs) - self.compute_type = 'SynapseSpark' # type: str - self.properties = kwargs.get('properties', None) + self.compute_type = "SynapseSpark" # type: str + self.properties = kwargs.get("properties", None) class SynapseSparkProperties(msrest.serialization.Model): @@ -31919,22 +30154,19 @@ class SynapseSparkProperties(msrest.serialization.Model): """ _attribute_map = { - 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'}, - 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'}, - 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, - 'pool_name': {'key': 'poolName', 'type': 'str'}, + "auto_scale_properties": {"key": "autoScaleProperties", "type": "AutoScaleProperties"}, + "auto_pause_properties": {"key": "autoPauseProperties", "type": "AutoPauseProperties"}, + "spark_version": {"key": "sparkVersion", "type": "str"}, + "node_count": {"key": "nodeCount", "type": "int"}, + "node_size": {"key": "nodeSize", "type": "str"}, + "node_size_family": {"key": "nodeSizeFamily", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "workspace_name": {"key": "workspaceName", "type": "str"}, + "pool_name": {"key": "poolName", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword auto_scale_properties: Auto scale properties. 
:paramtype auto_scale_properties: @@ -31960,16 +30192,16 @@ def __init__( :paramtype pool_name: str """ super(SynapseSparkProperties, self).__init__(**kwargs) - self.auto_scale_properties = kwargs.get('auto_scale_properties', None) - self.auto_pause_properties = kwargs.get('auto_pause_properties', None) - self.spark_version = kwargs.get('spark_version', None) - self.node_count = kwargs.get('node_count', None) - self.node_size = kwargs.get('node_size', None) - self.node_size_family = kwargs.get('node_size_family', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group = kwargs.get('resource_group', None) - self.workspace_name = kwargs.get('workspace_name', None) - self.pool_name = kwargs.get('pool_name', None) + self.auto_scale_properties = kwargs.get("auto_scale_properties", None) + self.auto_pause_properties = kwargs.get("auto_pause_properties", None) + self.spark_version = kwargs.get("spark_version", None) + self.node_count = kwargs.get("node_count", None) + self.node_size = kwargs.get("node_size", None) + self.node_size_family = kwargs.get("node_size_family", None) + self.subscription_id = kwargs.get("subscription_id", None) + self.resource_group = kwargs.get("resource_group", None) + self.workspace_name = kwargs.get("workspace_name", None) + self.pool_name = kwargs.get("pool_name", None) class SystemCreatedAcrAccount(msrest.serialization.Model): @@ -31984,15 +30216,12 @@ class SystemCreatedAcrAccount(msrest.serialization.Model): """ _attribute_map = { - 'acr_account_name': {'key': 'acrAccountName', 'type': 'str'}, - 'acr_account_sku': {'key': 'acrAccountSku', 'type': 'str'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "acr_account_name": {"key": "acrAccountName", "type": "str"}, + "acr_account_sku": {"key": "acrAccountSku", "type": "str"}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword 
acr_account_name: Name of the ACR account. :paramtype acr_account_name: str @@ -32002,9 +30231,9 @@ def __init__( :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ super(SystemCreatedAcrAccount, self).__init__(**kwargs) - self.acr_account_name = kwargs.get('acr_account_name', None) - self.acr_account_sku = kwargs.get('acr_account_sku', None) - self.arm_resource_id = kwargs.get('arm_resource_id', None) + self.acr_account_name = kwargs.get("acr_account_name", None) + self.acr_account_sku = kwargs.get("acr_account_sku", None) + self.arm_resource_id = kwargs.get("arm_resource_id", None) class SystemCreatedStorageAccount(msrest.serialization.Model): @@ -32031,17 +30260,14 @@ class SystemCreatedStorageAccount(msrest.serialization.Model): """ _attribute_map = { - 'allow_blob_public_access': {'key': 'allowBlobPublicAccess', 'type': 'bool'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - 'storage_account_hns_enabled': {'key': 'storageAccountHnsEnabled', 'type': 'bool'}, - 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, + "allow_blob_public_access": {"key": "allowBlobPublicAccess", "type": "bool"}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, + "storage_account_hns_enabled": {"key": "storageAccountHnsEnabled", "type": "bool"}, + "storage_account_name": {"key": "storageAccountName", "type": "str"}, + "storage_account_type": {"key": "storageAccountType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword allow_blob_public_access: Public blob access allowed. 
:paramtype allow_blob_public_access: bool @@ -32063,11 +30289,11 @@ def __init__( :paramtype storage_account_type: str """ super(SystemCreatedStorageAccount, self).__init__(**kwargs) - self.allow_blob_public_access = kwargs.get('allow_blob_public_access', None) - self.arm_resource_id = kwargs.get('arm_resource_id', None) - self.storage_account_hns_enabled = kwargs.get('storage_account_hns_enabled', None) - self.storage_account_name = kwargs.get('storage_account_name', None) - self.storage_account_type = kwargs.get('storage_account_type', None) + self.allow_blob_public_access = kwargs.get("allow_blob_public_access", None) + self.arm_resource_id = kwargs.get("arm_resource_id", None) + self.storage_account_hns_enabled = kwargs.get("storage_account_hns_enabled", None) + self.storage_account_name = kwargs.get("storage_account_name", None) + self.storage_account_type = kwargs.get("storage_account_type", None) class SystemData(msrest.serialization.Model): @@ -32090,18 +30316,15 @@ class SystemData(msrest.serialization.Model): """ _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword created_by: The identity that created the resource. 
:paramtype created_by: str @@ -32120,12 +30343,12 @@ def __init__( :paramtype last_modified_at: ~datetime.datetime """ super(SystemData, self).__init__(**kwargs) - self.created_by = kwargs.get('created_by', None) - self.created_by_type = kwargs.get('created_by_type', None) - self.created_at = kwargs.get('created_at', None) - self.last_modified_by = kwargs.get('last_modified_by', None) - self.last_modified_by_type = kwargs.get('last_modified_by_type', None) - self.last_modified_at = kwargs.get('last_modified_at', None) + self.created_by = kwargs.get("created_by", None) + self.created_by_type = kwargs.get("created_by_type", None) + self.created_at = kwargs.get("created_at", None) + self.last_modified_by = kwargs.get("last_modified_by", None) + self.last_modified_by_type = kwargs.get("last_modified_by_type", None) + self.last_modified_at = kwargs.get("last_modified_at", None) class SystemService(msrest.serialization.Model): @@ -32142,23 +30365,19 @@ class SystemService(msrest.serialization.Model): """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + "system_service_type": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "version": {"readonly": True}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "system_service_type": {"key": "systemServiceType", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(SystemService, self).__init__(**kwargs) self.system_service_type = None self.public_ip_address = None @@ -32213,32 +30432,29 @@ class TableFixedParameters(msrest.serialization.Model): """ _attribute_map = { - 'booster': {'key': 
'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'max_bin': {'key': 'maxBin', 'type': 'int'}, - 'max_depth': {'key': 'maxDepth', 'type': 'int'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'int'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'int'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'float'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'int'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'int'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'float'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'float'}, - 'subsample': {'key': 'subsample', 'type': 'float'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'float'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'bool'}, - 'with_std': {'key': 'withStd', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "max_bin": {"key": "maxBin", "type": "int"}, + "max_depth": {"key": "maxDepth", "type": "int"}, + "max_leaves": {"key": "maxLeaves", "type": "int"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "int"}, + "min_split_gain": {"key": "minSplitGain", "type": "float"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "int"}, + "num_leaves": {"key": "numLeaves", "type": "int"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "float"}, + "reg_lambda": {"key": "regLambda", "type": "float"}, + "subsample": {"key": 
"subsample", "type": "float"}, + "subsample_freq": {"key": "subsampleFreq", "type": "float"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "bool"}, + "with_std": {"key": "withStd", "type": "bool"}, + } + + def __init__(self, **kwargs): """ :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. :paramtype booster: str @@ -32284,26 +30500,26 @@ def __init__( :paramtype with_std: bool """ super(TableFixedParameters, self).__init__(**kwargs) - self.booster = kwargs.get('booster', None) - self.boosting_type = kwargs.get('boosting_type', None) - self.grow_policy = kwargs.get('grow_policy', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.max_bin = kwargs.get('max_bin', None) - self.max_depth = kwargs.get('max_depth', None) - self.max_leaves = kwargs.get('max_leaves', None) - self.min_data_in_leaf = kwargs.get('min_data_in_leaf', None) - self.min_split_gain = kwargs.get('min_split_gain', None) - self.model_name = kwargs.get('model_name', None) - self.n_estimators = kwargs.get('n_estimators', None) - self.num_leaves = kwargs.get('num_leaves', None) - self.preprocessor_name = kwargs.get('preprocessor_name', None) - self.reg_alpha = kwargs.get('reg_alpha', None) - self.reg_lambda = kwargs.get('reg_lambda', None) - self.subsample = kwargs.get('subsample', None) - self.subsample_freq = kwargs.get('subsample_freq', None) - self.tree_method = kwargs.get('tree_method', None) - self.with_mean = kwargs.get('with_mean', False) - self.with_std = kwargs.get('with_std', False) + self.booster = kwargs.get("booster", None) + self.boosting_type = kwargs.get("boosting_type", None) + self.grow_policy = kwargs.get("grow_policy", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.max_bin = kwargs.get("max_bin", None) + self.max_depth = kwargs.get("max_depth", None) + self.max_leaves = kwargs.get("max_leaves", None) + self.min_data_in_leaf = kwargs.get("min_data_in_leaf", None) + 
self.min_split_gain = kwargs.get("min_split_gain", None) + self.model_name = kwargs.get("model_name", None) + self.n_estimators = kwargs.get("n_estimators", None) + self.num_leaves = kwargs.get("num_leaves", None) + self.preprocessor_name = kwargs.get("preprocessor_name", None) + self.reg_alpha = kwargs.get("reg_alpha", None) + self.reg_lambda = kwargs.get("reg_lambda", None) + self.subsample = kwargs.get("subsample", None) + self.subsample_freq = kwargs.get("subsample_freq", None) + self.tree_method = kwargs.get("tree_method", None) + self.with_mean = kwargs.get("with_mean", False) + self.with_std = kwargs.get("with_std", False) class TableParameterSubspace(msrest.serialization.Model): @@ -32354,32 +30570,29 @@ class TableParameterSubspace(msrest.serialization.Model): """ _attribute_map = { - 'booster': {'key': 'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'max_bin': {'key': 'maxBin', 'type': 'str'}, - 'max_depth': {'key': 'maxDepth', 'type': 'str'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'str'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'str'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'str'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'str'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'str'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'str'}, - 'subsample': {'key': 'subsample', 'type': 'str'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'str'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'str'}, - 'with_std': {'key': 'withStd', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + "booster": {"key": "booster", "type": "str"}, + 
"boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "max_bin": {"key": "maxBin", "type": "str"}, + "max_depth": {"key": "maxDepth", "type": "str"}, + "max_leaves": {"key": "maxLeaves", "type": "str"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "str"}, + "min_split_gain": {"key": "minSplitGain", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "str"}, + "num_leaves": {"key": "numLeaves", "type": "str"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "str"}, + "reg_lambda": {"key": "regLambda", "type": "str"}, + "subsample": {"key": "subsample", "type": "str"}, + "subsample_freq": {"key": "subsampleFreq", "type": "str"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "str"}, + "with_std": {"key": "withStd", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. 
:paramtype booster: str @@ -32425,26 +30638,26 @@ def __init__( :paramtype with_std: str """ super(TableParameterSubspace, self).__init__(**kwargs) - self.booster = kwargs.get('booster', None) - self.boosting_type = kwargs.get('boosting_type', None) - self.grow_policy = kwargs.get('grow_policy', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.max_bin = kwargs.get('max_bin', None) - self.max_depth = kwargs.get('max_depth', None) - self.max_leaves = kwargs.get('max_leaves', None) - self.min_data_in_leaf = kwargs.get('min_data_in_leaf', None) - self.min_split_gain = kwargs.get('min_split_gain', None) - self.model_name = kwargs.get('model_name', None) - self.n_estimators = kwargs.get('n_estimators', None) - self.num_leaves = kwargs.get('num_leaves', None) - self.preprocessor_name = kwargs.get('preprocessor_name', None) - self.reg_alpha = kwargs.get('reg_alpha', None) - self.reg_lambda = kwargs.get('reg_lambda', None) - self.subsample = kwargs.get('subsample', None) - self.subsample_freq = kwargs.get('subsample_freq', None) - self.tree_method = kwargs.get('tree_method', None) - self.with_mean = kwargs.get('with_mean', None) - self.with_std = kwargs.get('with_std', None) + self.booster = kwargs.get("booster", None) + self.boosting_type = kwargs.get("boosting_type", None) + self.grow_policy = kwargs.get("grow_policy", None) + self.learning_rate = kwargs.get("learning_rate", None) + self.max_bin = kwargs.get("max_bin", None) + self.max_depth = kwargs.get("max_depth", None) + self.max_leaves = kwargs.get("max_leaves", None) + self.min_data_in_leaf = kwargs.get("min_data_in_leaf", None) + self.min_split_gain = kwargs.get("min_split_gain", None) + self.model_name = kwargs.get("model_name", None) + self.n_estimators = kwargs.get("n_estimators", None) + self.num_leaves = kwargs.get("num_leaves", None) + self.preprocessor_name = kwargs.get("preprocessor_name", None) + self.reg_alpha = kwargs.get("reg_alpha", None) + self.reg_lambda = 
kwargs.get("reg_lambda", None) + self.subsample = kwargs.get("subsample", None) + self.subsample_freq = kwargs.get("subsample_freq", None) + self.tree_method = kwargs.get("tree_method", None) + self.with_mean = kwargs.get("with_mean", None) + self.with_std = kwargs.get("with_std", None) class TableSweepSettings(msrest.serialization.Model): @@ -32461,18 +30674,15 @@ class TableSweepSettings(msrest.serialization.Model): """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword early_termination: Type of early termination policy for the sweeping job. 
:paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy @@ -32482,8 +30692,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ super(TableSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] + self.early_termination = kwargs.get("early_termination", None) + self.sampling_algorithm = kwargs["sampling_algorithm"] class TableVerticalFeaturizationSettings(FeaturizationSettings): @@ -32513,18 +30723,15 @@ class TableVerticalFeaturizationSettings(FeaturizationSettings): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, - 'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'}, - 'column_name_and_types': {'key': 'columnNameAndTypes', 'type': '{str}'}, - 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + "blocked_transformers": {"key": "blockedTransformers", "type": "[str]"}, + "column_name_and_types": {"key": "columnNameAndTypes", "type": "{str}"}, + "enable_dnn_featurization": {"key": "enableDnnFeaturization", "type": "bool"}, + "mode": {"key": "mode", "type": "str"}, + "transformer_params": {"key": "transformerParams", "type": "{[ColumnTransformer]}"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword dataset_language: Dataset language, useful for the text data. 
:paramtype dataset_language: str @@ -32549,11 +30756,11 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] """ super(TableVerticalFeaturizationSettings, self).__init__(**kwargs) - self.blocked_transformers = kwargs.get('blocked_transformers', None) - self.column_name_and_types = kwargs.get('column_name_and_types', None) - self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', False) - self.mode = kwargs.get('mode', None) - self.transformer_params = kwargs.get('transformer_params', None) + self.blocked_transformers = kwargs.get("blocked_transformers", None) + self.column_name_and_types = kwargs.get("column_name_and_types", None) + self.enable_dnn_featurization = kwargs.get("enable_dnn_featurization", False) + self.mode = kwargs.get("mode", None) + self.transformer_params = kwargs.get("transformer_params", None) class TableVerticalLimitSettings(msrest.serialization.Model): @@ -32583,22 +30790,19 @@ class TableVerticalLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, - 'exit_score': {'key': 'exitScore', 'type': 'float'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'sweep_concurrent_trials': {'key': 'sweepConcurrentTrials', 'type': 'int'}, - 'sweep_trials': {'key': 'sweepTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "enable_early_termination": {"key": "enableEarlyTermination", "type": "bool"}, + "exit_score": {"key": "exitScore", "type": "float"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_cores_per_trial": {"key": "maxCoresPerTrial", "type": "int"}, + "max_nodes": {"key": "maxNodes", 
"type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "sweep_concurrent_trials": {"key": "sweepConcurrentTrials", "type": "int"}, + "sweep_trials": {"key": "sweepTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword enable_early_termination: Enable early termination, determines whether or not if AutoMLJob will terminate early if there is no score improvement in last 20 iterations. @@ -32624,16 +30828,16 @@ def __init__( :paramtype trial_timeout: ~datetime.timedelta """ super(TableVerticalLimitSettings, self).__init__(**kwargs) - self.enable_early_termination = kwargs.get('enable_early_termination', True) - self.exit_score = kwargs.get('exit_score', None) - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1) - self.max_cores_per_trial = kwargs.get('max_cores_per_trial', -1) - self.max_nodes = kwargs.get('max_nodes', 1) - self.max_trials = kwargs.get('max_trials', 1000) - self.sweep_concurrent_trials = kwargs.get('sweep_concurrent_trials', 0) - self.sweep_trials = kwargs.get('sweep_trials', 0) - self.timeout = kwargs.get('timeout', "PT6H") - self.trial_timeout = kwargs.get('trial_timeout', "PT30M") + self.enable_early_termination = kwargs.get("enable_early_termination", True) + self.exit_score = kwargs.get("exit_score", None) + self.max_concurrent_trials = kwargs.get("max_concurrent_trials", 1) + self.max_cores_per_trial = kwargs.get("max_cores_per_trial", -1) + self.max_nodes = kwargs.get("max_nodes", 1) + self.max_trials = kwargs.get("max_trials", 1000) + self.sweep_concurrent_trials = kwargs.get("sweep_concurrent_trials", 0) + self.sweep_trials = kwargs.get("sweep_trials", 0) + self.timeout = kwargs.get("timeout", "PT6H") + self.trial_timeout = kwargs.get("trial_timeout", "PT30M") class TargetUtilizationScaleSettings(OnlineScaleSettings): @@ -32657,21 
+30861,18 @@ class TargetUtilizationScaleSettings(OnlineScaleSettings): """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, - 'max_instances': {'key': 'maxInstances', 'type': 'int'}, - 'min_instances': {'key': 'minInstances', 'type': 'int'}, - 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'}, - 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'}, + "scale_type": {"key": "scaleType", "type": "str"}, + "max_instances": {"key": "maxInstances", "type": "int"}, + "min_instances": {"key": "minInstances", "type": "int"}, + "polling_interval": {"key": "pollingInterval", "type": "duration"}, + "target_utilization_percentage": {"key": "targetUtilizationPercentage", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword max_instances: The maximum number of instances that the deployment can scale to. The quota will be reserved for max_instances. 
@@ -32685,11 +30886,11 @@ def __init__( :paramtype target_utilization_percentage: int """ super(TargetUtilizationScaleSettings, self).__init__(**kwargs) - self.scale_type = 'TargetUtilization' # type: str - self.max_instances = kwargs.get('max_instances', 1) - self.min_instances = kwargs.get('min_instances', 1) - self.polling_interval = kwargs.get('polling_interval', "PT1S") - self.target_utilization_percentage = kwargs.get('target_utilization_percentage', 70) + self.scale_type = "TargetUtilization" # type: str + self.max_instances = kwargs.get("max_instances", 1) + self.min_instances = kwargs.get("min_instances", 1) + self.polling_interval = kwargs.get("polling_interval", "PT1S") + self.target_utilization_percentage = kwargs.get("target_utilization_percentage", 70) class TensorFlow(DistributionConfiguration): @@ -32708,19 +30909,16 @@ class TensorFlow(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'}, - 'worker_count': {'key': 'workerCount', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "parameter_server_count": {"key": "parameterServerCount", "type": "int"}, + "worker_count": {"key": "workerCount", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword parameter_server_count: Number of parameter server tasks. 
:paramtype parameter_server_count: int @@ -32728,75 +30926,72 @@ def __init__( :paramtype worker_count: int """ super(TensorFlow, self).__init__(**kwargs) - self.distribution_type = 'TensorFlow' # type: str - self.parameter_server_count = kwargs.get('parameter_server_count', 0) - self.worker_count = kwargs.get('worker_count', None) + self.distribution_type = "TensorFlow" # type: str + self.parameter_server_count = kwargs.get("parameter_server_count", 0) + self.worker_count = kwargs.get("worker_count", None) class TextClassification(AutoMLVertical, NlpVertical): """Text Classification task in AutoML NLP vertical. -NLP - Natural Language Processing. + NLP - Natural Language Processing. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. 
Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-Classification task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification task. Possible values include: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword 
featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: @@ -32828,87 +31023,84 @@ def __init__( ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ super(TextClassification, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextClassification' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.task_type = "TextClassification" # type: str + self.primary_metric = kwargs.get("primary_metric", None) + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class TextClassificationMultilabel(AutoMLVertical, NlpVertical): """Text Classification Multilabel task in AutoML NLP vertical. -NLP - Natural Language Processing. + NLP - Natural Language Processing. - Variables are only populated by the server, and will be ignored when sending a request. + Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". 
- :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. - Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. - Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", - "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "IOU". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". 
+ :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. + Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. + Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", + "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "IOU". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - 
def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: @@ -32935,88 +31127,85 @@ def __init__( :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super(TextClassificationMultilabel, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextClassificationMultilabel' # type: str + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.task_type = "TextClassificationMultilabel" # type: str self.primary_metric = None - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class TextNer(AutoMLVertical, NlpVertical): """Text-NER task in AutoML NLP vertical. -NER - Named Entity Recognition. -NLP - Natural Language Processing. + NER - Named Entity Recognition. + NLP - Natural Language Processing. - Variables are only populated by the server, and will be ignored when sending a request. 
+ Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. 
- Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-NER task. - Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Possible - values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. 
Possible values include: "NotSet", "Debug", + "Info", "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. + Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-NER task. + Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Possible + values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def 
__init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: @@ -33043,17 +31232,17 @@ def __init__( :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super(TextNer, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextNER' # type: str + self.featurization_settings = kwargs.get("featurization_settings", None) + self.fixed_parameters = kwargs.get("fixed_parameters", None) + self.limit_settings = kwargs.get("limit_settings", None) + self.search_space = kwargs.get("search_space", None) + self.sweep_settings = kwargs.get("sweep_settings", None) + self.validation_data = kwargs.get("validation_data", None) + self.task_type = "TextNER" # type: str self.primary_metric = None - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] + self.log_verbosity = kwargs.get("log_verbosity", None) + self.target_column_name = kwargs.get("target_column_name", None) + self.training_data = kwargs["training_data"] class ThrottlingRule(msrest.serialization.Model): @@ -33074,18 +31263,15 @@ class ThrottlingRule(msrest.serialization.Model): """ _attribute_map = { - 'key': {'key': 'key', 'type': 'str'}, - 'renewal_period': {'key': 'renewalPeriod', 'type': 'float'}, - 'count': {'key': 'count', 'type': 'float'}, - 'min_count': {'key': 'minCount', 'type': 'float'}, - 'dynamic_throttling_enabled': {'key': 'dynamicThrottlingEnabled', 'type': 'bool'}, - 
'match_patterns': {'key': 'matchPatterns', 'type': '[RequestMatchPattern]'}, + "key": {"key": "key", "type": "str"}, + "renewal_period": {"key": "renewalPeriod", "type": "float"}, + "count": {"key": "count", "type": "float"}, + "min_count": {"key": "minCount", "type": "float"}, + "dynamic_throttling_enabled": {"key": "dynamicThrottlingEnabled", "type": "bool"}, + "match_patterns": {"key": "matchPatterns", "type": "[RequestMatchPattern]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key: :paramtype key: str @@ -33101,12 +31287,12 @@ def __init__( :paramtype match_patterns: list[~azure.mgmt.machinelearningservices.models.RequestMatchPattern] """ super(ThrottlingRule, self).__init__(**kwargs) - self.key = kwargs.get('key', None) - self.renewal_period = kwargs.get('renewal_period', None) - self.count = kwargs.get('count', None) - self.min_count = kwargs.get('min_count', None) - self.dynamic_throttling_enabled = kwargs.get('dynamic_throttling_enabled', None) - self.match_patterns = kwargs.get('match_patterns', None) + self.key = kwargs.get("key", None) + self.renewal_period = kwargs.get("renewal_period", None) + self.count = kwargs.get("count", None) + self.min_count = kwargs.get("min_count", None) + self.dynamic_throttling_enabled = kwargs.get("dynamic_throttling_enabled", None) + self.match_patterns = kwargs.get("match_patterns", None) class TmpfsOptions(msrest.serialization.Model): @@ -33117,19 +31303,16 @@ class TmpfsOptions(msrest.serialization.Model): """ _attribute_map = { - 'size': {'key': 'size', 'type': 'int'}, + "size": {"key": "size", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword size: Mention the Tmpfs size. 
:paramtype size: int """ super(TmpfsOptions, self).__init__(**kwargs) - self.size = kwargs.get('size', None) + self.size = kwargs.get("size", None) class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): @@ -33147,25 +31330,22 @@ class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'top': {'key': 'top', 'type': 'int'}, + "filter_type": {"key": "filterType", "type": "str"}, + "top": {"key": "top", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword top: The number of top features to include. :paramtype top: int """ super(TopNFeaturesByAttribution, self).__init__(**kwargs) - self.filter_type = 'TopNByAttribution' # type: str - self.top = kwargs.get('top', 10) + self.filter_type = "TopNByAttribution" # type: str + self.top = kwargs.get("top", 10) class TrialComponent(msrest.serialization.Model): @@ -33191,23 +31371,20 @@ class TrialComponent(msrest.serialization.Model): """ _validation = { - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": 
"distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str @@ -33226,12 +31403,12 @@ def __init__( :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ super(TrialComponent, self).__init__(**kwargs) - self.code_id = kwargs.get('code_id', None) - self.command = kwargs['command'] - self.distribution = kwargs.get('distribution', None) - self.environment_id = kwargs['environment_id'] - self.environment_variables = kwargs.get('environment_variables', None) - self.resources = kwargs.get('resources', None) + self.code_id = kwargs.get("code_id", None) + self.command = kwargs["command"] + self.distribution = kwargs.get("distribution", None) + self.environment_id = kwargs["environment_id"] + self.environment_variables = kwargs.get("environment_variables", None) + self.resources = kwargs.get("resources", None) class TriggerOnceRequest(msrest.serialization.Model): @@ -33244,23 +31421,20 @@ class TriggerOnceRequest(msrest.serialization.Model): """ _validation = { - 'schedule_time': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "schedule_time": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'schedule_time': {'key': 'scheduleTime', 'type': 'str'}, + "schedule_time": {"key": "scheduleTime", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword schedule_time: Required. [Required] Specify the schedule time for trigger once. 
:paramtype schedule_time: str """ super(TriggerOnceRequest, self).__init__(**kwargs) - self.schedule_time = kwargs['schedule_time'] + self.schedule_time = kwargs["schedule_time"] class TriggerRunSubmissionDto(msrest.serialization.Model): @@ -33274,14 +31448,11 @@ class TriggerRunSubmissionDto(msrest.serialization.Model): """ _attribute_map = { - 'schedule_action_type': {'key': 'scheduleActionType', 'type': 'str'}, - 'submission_id': {'key': 'submissionId', 'type': 'str'}, + "schedule_action_type": {"key": "scheduleActionType", "type": "str"}, + "submission_id": {"key": "submissionId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword schedule_action_type: Possible values include: "ComputeStartStop", "CreateJob", "InvokeBatchEndpoint", "ImportData", "CreateMonitor", "FeatureStoreMaterialization". @@ -33290,8 +31461,8 @@ def __init__( :paramtype submission_id: str """ super(TriggerRunSubmissionDto, self).__init__(**kwargs) - self.schedule_action_type = kwargs.get('schedule_action_type', None) - self.submission_id = kwargs.get('submission_id', None) + self.schedule_action_type = kwargs.get("schedule_action_type", None) + self.submission_id = kwargs.get("submission_id", None) class TritonInferencingServer(InferencingServer): @@ -33308,26 +31479,23 @@ class TritonInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword inference_configuration: Inference configuration for Triton. 
:paramtype inference_configuration: ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ super(TritonInferencingServer, self).__init__(**kwargs) - self.server_type = 'Triton' # type: str - self.inference_configuration = kwargs.get('inference_configuration', None) + self.server_type = "Triton" # type: str + self.inference_configuration = kwargs.get("inference_configuration", None) class TritonModelJobInput(JobInput, AssetJobInput): @@ -33351,22 +31519,19 @@ class TritonModelJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". 
@@ -33379,11 +31544,11 @@ def __init__( :paramtype description: str """ super(TritonModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'triton_model' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "triton_model" # type: str + self.description = kwargs.get("description", None) class TritonModelJobOutput(JobOutput, AssetJobOutput): @@ -33413,24 +31578,21 @@ class TritonModelJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -33449,14 +31611,14 @@ def __init__( :paramtype description: str """ super(TritonModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'triton_model' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "triton_model" # type: str + self.description = kwargs.get("description", None) class TruncationSelectionPolicy(EarlyTerminationPolicy): @@ -33477,20 +31639,17 @@ class TruncationSelectionPolicy(EarlyTerminationPolicy): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "truncation_percentage": {"key": "truncationPercentage", "type": "int"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. 
:paramtype delay_evaluation: int @@ -33500,8 +31659,8 @@ def __init__( :paramtype truncation_percentage: int """ super(TruncationSelectionPolicy, self).__init__(**kwargs) - self.policy_type = 'TruncationSelection' # type: str - self.truncation_percentage = kwargs.get('truncation_percentage', 0) + self.policy_type = "TruncationSelection" # type: str + self.truncation_percentage = kwargs.get("truncation_percentage", 0) class UpdateWorkspaceQuotas(msrest.serialization.Model): @@ -33525,23 +31684,20 @@ class UpdateWorkspaceQuotas(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, + "id": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "long"}, + "unit": {"key": "unit", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword limit: The maximum permitted quota of the resource. 
:paramtype limit: long @@ -33554,9 +31710,9 @@ def __init__( super(UpdateWorkspaceQuotas, self).__init__(**kwargs) self.id = None self.type = None - self.limit = kwargs.get('limit', None) + self.limit = kwargs.get("limit", None) self.unit = None - self.status = kwargs.get('status', None) + self.status = kwargs.get("status", None) class UpdateWorkspaceQuotasResult(msrest.serialization.Model): @@ -33572,21 +31728,17 @@ class UpdateWorkspaceQuotasResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[UpdateWorkspaceQuotas]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) self.value = None self.next_link = None @@ -33625,27 +31777,24 @@ class UriFileDataVersion(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 
'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -33672,7 +31821,7 @@ def __init__( :paramtype stage: str """ super(UriFileDataVersion, self).__init__(**kwargs) - self.data_type = 'uri_file' # type: str + self.data_type = "uri_file" # type: str class UriFileJobInput(JobInput, AssetJobInput): @@ -33696,22 +31845,19 @@ class UriFileJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery 
Mode. Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". @@ -33724,11 +31870,11 @@ def __init__( :paramtype description: str """ super(UriFileJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'uri_file' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "uri_file" # type: str + self.description = kwargs.get("description", None) class UriFileJobOutput(JobOutput, AssetJobOutput): @@ -33758,24 +31904,21 @@ class UriFileJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -33794,14 +31937,14 @@ def __init__( :paramtype description: str """ super(UriFileJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'uri_file' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "uri_file" # type: str + self.description = kwargs.get("description", None) class UriFolderDataVersion(DataVersionBaseProperties): @@ -33837,27 +31980,24 @@ class UriFolderDataVersion(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": 
"description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword description: The asset description text. :paramtype description: str @@ -33884,7 +32024,7 @@ def __init__( :paramtype stage: str """ super(UriFolderDataVersion, self).__init__(**kwargs) - self.data_type = 'uri_folder' # type: str + self.data_type = "uri_folder" # type: str class UriFolderJobInput(JobInput, AssetJobInput): @@ -33908,22 +32048,19 @@ class UriFolderJobInput(JobInput, AssetJobInput): """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "job_input_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword mode: Input Asset Delivery Mode. 
Possible values include: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". @@ -33936,11 +32073,11 @@ def __init__( :paramtype description: str """ super(UriFolderJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs['uri'] - self.job_input_type = 'uri_folder' # type: str - self.description = kwargs.get('description', None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs["uri"] + self.job_input_type = "uri_folder" # type: str + self.description = kwargs.get("description", None) class UriFolderJobOutput(JobOutput, AssetJobOutput): @@ -33970,24 +32107,21 @@ class UriFolderJobOutput(JobOutput, AssetJobOutput): """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "path_on_compute": {"key": "pathOnCompute", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword asset_name: Output Asset Name. 
:paramtype asset_name: str @@ -34006,14 +32140,14 @@ def __init__( :paramtype description: str """ super(UriFolderJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.path_on_compute = kwargs.get('path_on_compute', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'uri_folder' # type: str - self.description = kwargs.get('description', None) + self.asset_name = kwargs.get("asset_name", None) + self.asset_version = kwargs.get("asset_version", None) + self.auto_delete_setting = kwargs.get("auto_delete_setting", None) + self.mode = kwargs.get("mode", None) + self.path_on_compute = kwargs.get("path_on_compute", None) + self.uri = kwargs.get("uri", None) + self.job_output_type = "uri_folder" # type: str + self.description = kwargs.get("description", None) class Usage(msrest.serialization.Model): @@ -34038,31 +32172,27 @@ class Usage(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, + "current_value": {"readonly": True}, + "limit": {"readonly": True}, + "name": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, + "id": {"key": "id", "type": "str"}, + 
"aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "unit": {"key": "unit", "type": "str"}, + "current_value": {"key": "currentValue", "type": "long"}, + "limit": {"key": "limit", "type": "long"}, + "name": {"key": "name", "type": "UsageName"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(Usage, self).__init__(**kwargs) self.id = None self.aml_workspace_location = None @@ -34085,21 +32215,17 @@ class UsageName(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "value": {"readonly": True}, + "localized_value": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(UsageName, self).__init__(**kwargs) self.value = None self.localized_value = None @@ -34120,19 +32246,16 @@ class UserAccountCredentials(msrest.serialization.Model): """ _validation = { - 'admin_user_name': {'required': True}, + "admin_user_name": {"required": True}, } _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, + "admin_user_name": {"key": "adminUserName", "type": "str"}, + "admin_user_ssh_public_key": {"key": "adminUserSshPublicKey", "type": "str"}, + "admin_user_password": {"key": "adminUserPassword", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword admin_user_name: Required. Name of the administrator user account which can be used to SSH to nodes. 
@@ -34143,9 +32266,9 @@ def __init__( :paramtype admin_user_password: str """ super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = kwargs['admin_user_name'] - self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None) - self.admin_user_password = kwargs.get('admin_user_password', None) + self.admin_user_name = kwargs["admin_user_name"] + self.admin_user_ssh_public_key = kwargs.get("admin_user_ssh_public_key", None) + self.admin_user_password = kwargs.get("admin_user_password", None) class UserAssignedIdentity(msrest.serialization.Model): @@ -34160,21 +32283,17 @@ class UserAssignedIdentity(msrest.serialization.Model): """ _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, + "principal_id": {"readonly": True}, + "client_id": {"readonly": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(UserAssignedIdentity, self).__init__(**kwargs) self.principal_id = None self.client_id = None @@ -34188,19 +32307,16 @@ class UserCreatedAcrAccount(msrest.serialization.Model): """ _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword arm_resource_id: ARM ResourceId of a resource. 
:paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ super(UserCreatedAcrAccount, self).__init__(**kwargs) - self.arm_resource_id = kwargs.get('arm_resource_id', None) + self.arm_resource_id = kwargs.get("arm_resource_id", None) class UserCreatedStorageAccount(msrest.serialization.Model): @@ -34211,19 +32327,16 @@ class UserCreatedStorageAccount(msrest.serialization.Model): """ _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword arm_resource_id: ARM ResourceId of a resource. :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ super(UserCreatedStorageAccount, self).__init__(**kwargs) - self.arm_resource_id = kwargs.get('arm_resource_id', None) + self.arm_resource_id = kwargs.get("arm_resource_id", None) class UserIdentity(IdentityConfiguration): @@ -34238,21 +32351,17 @@ class UserIdentity(IdentityConfiguration): """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(UserIdentity, self).__init__(**kwargs) - self.identity_type = 'UserIdentity' # type: str + self.identity_type = "UserIdentity" # type: str class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): @@ -34305,28 +32414,25 @@ class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP """ _validation = { - 'auth_type': {'required': True}, - 'created_by_workspace_arm_id': {'readonly': True}, - 'group': {'readonly': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": 
True}, + "group": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'created_by_workspace_arm_id': {'key': 'createdByWorkspaceArmId', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'group': {'key': 'group', 'type': 'str'}, - 'is_shared_to_all': {'key': 'isSharedToAll', 'type': 'bool'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'shared_user_list': {'key': 'sharedUserList', 'type': '[str]'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionUsernamePassword'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "group": {"key": "group", "type": "str"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "shared_user_list": {"key": "sharedUserList", "type": "[str]"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionUsernamePassword"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword category: Category of the connection. 
Possible values include: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", @@ -34361,8 +32467,8 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword """ super(UsernamePasswordAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'UsernamePassword' # type: str - self.credentials = kwargs.get('credentials', None) + self.auth_type = "UsernamePassword" # type: str + self.credentials = kwargs.get("credentials", None) class VirtualMachineSchema(msrest.serialization.Model): @@ -34373,20 +32479,17 @@ class VirtualMachineSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties """ super(VirtualMachineSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class VirtualMachine(Compute, VirtualMachineSchema): @@ -34428,32 +32531,29 @@ class VirtualMachine(Compute, VirtualMachineSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, - 'compute_type': {'key': 
'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: :paramtype properties: @@ -34469,17 +32569,17 @@ def __init__( :paramtype disable_local_auth: bool """ super(VirtualMachine, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'VirtualMachine' # type: str - self.compute_location = kwargs.get('compute_location', None) + self.properties = kwargs.get("properties", None) + self.compute_type = "VirtualMachine" # type: str + self.compute_location = kwargs.get("compute_location", None) self.provisioning_state = None - self.description = kwargs.get('description', 
None) + self.description = kwargs.get("description", None) self.created_on = None self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) + self.resource_id = kwargs.get("resource_id", None) self.provisioning_errors = None self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.disable_local_auth = kwargs.get("disable_local_auth", None) class VirtualMachineImage(msrest.serialization.Model): @@ -34492,23 +32592,20 @@ class VirtualMachineImage(msrest.serialization.Model): """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword id: Required. Virtual Machine image path. :paramtype id: str """ super(VirtualMachineImage, self).__init__(**kwargs) - self.id = kwargs['id'] + self.id = kwargs["id"] class VirtualMachineSchemaProperties(msrest.serialization.Model): @@ -34531,18 +32628,15 @@ class VirtualMachineSchemaProperties(msrest.serialization.Model): """ _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'notebook_server_port': {'key': 'notebookServerPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, + "virtual_machine_size": {"key": "virtualMachineSize", "type": "str"}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "notebook_server_port": {"key": "notebookServerPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "is_notebook_instance_compute": {"key": "isNotebookInstanceCompute", 
"type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword virtual_machine_size: Virtual Machine size. :paramtype virtual_machine_size: str @@ -34560,12 +32654,12 @@ def __init__( :paramtype is_notebook_instance_compute: bool """ super(VirtualMachineSchemaProperties, self).__init__(**kwargs) - self.virtual_machine_size = kwargs.get('virtual_machine_size', None) - self.ssh_port = kwargs.get('ssh_port', None) - self.notebook_server_port = kwargs.get('notebook_server_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None) + self.virtual_machine_size = kwargs.get("virtual_machine_size", None) + self.ssh_port = kwargs.get("ssh_port", None) + self.notebook_server_port = kwargs.get("notebook_server_port", None) + self.address = kwargs.get("address", None) + self.administrator_account = kwargs.get("administrator_account", None) + self.is_notebook_instance_compute = kwargs.get("is_notebook_instance_compute", None) class VirtualMachineSecretsSchema(msrest.serialization.Model): @@ -34577,20 +32671,17 @@ class VirtualMachineSecretsSchema(msrest.serialization.Model): """ _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword administrator_account: Admin credentials for virtual machine. 
:paramtype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ super(VirtualMachineSecretsSchema, self).__init__(**kwargs) - self.administrator_account = kwargs.get('administrator_account', None) + self.administrator_account = kwargs.get("administrator_account", None) class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): @@ -34608,26 +32699,23 @@ class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword administrator_account: Admin credentials for virtual machine. 
:paramtype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ super(VirtualMachineSecrets, self).__init__(**kwargs) - self.administrator_account = kwargs.get('administrator_account', None) - self.compute_type = 'VirtualMachine' # type: str + self.administrator_account = kwargs.get("administrator_account", None) + self.compute_type = "VirtualMachine" # type: str class VirtualMachineSize(msrest.serialization.Model): @@ -34662,35 +32750,32 @@ class VirtualMachineSize(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, + "name": {"readonly": True}, + "family": {"readonly": True}, + "v_cp_us": {"readonly": True}, + "gpus": {"readonly": True}, + "os_vhd_size_mb": {"readonly": True}, + "max_resource_volume_mb": {"readonly": True}, + "memory_gb": {"readonly": True}, + "low_priority_capable": {"readonly": True}, + "premium_io": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, + "name": {"key": "name", "type": "str"}, + "family": {"key": "family", "type": "str"}, + 
"v_cp_us": {"key": "vCPUs", "type": "int"}, + "gpus": {"key": "gpus", "type": "int"}, + "os_vhd_size_mb": {"key": "osVhdSizeMB", "type": "int"}, + "max_resource_volume_mb": {"key": "maxResourceVolumeMB", "type": "int"}, + "memory_gb": {"key": "memoryGB", "type": "float"}, + "low_priority_capable": {"key": "lowPriorityCapable", "type": "bool"}, + "premium_io": {"key": "premiumIO", "type": "bool"}, + "estimated_vm_prices": {"key": "estimatedVMPrices", "type": "EstimatedVMPrices"}, + "supported_compute_types": {"key": "supportedComputeTypes", "type": "[str]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword estimated_vm_prices: The estimated price information for using a VM. :paramtype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices @@ -34708,8 +32793,8 @@ def __init__( self.memory_gb = None self.low_priority_capable = None self.premium_io = None - self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None) - self.supported_compute_types = kwargs.get('supported_compute_types', None) + self.estimated_vm_prices = kwargs.get("estimated_vm_prices", None) + self.supported_compute_types = kwargs.get("supported_compute_types", None) class VirtualMachineSizeListResult(msrest.serialization.Model): @@ -34720,19 +32805,16 @@ class VirtualMachineSizeListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, + "value": {"key": "value", "type": "[VirtualMachineSize]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword value: The list of virtual machine sizes supported by AmlCompute. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + self.value = kwargs.get("value", None) class VirtualMachineSshCredentials(msrest.serialization.Model): @@ -34749,16 +32831,13 @@ class VirtualMachineSshCredentials(msrest.serialization.Model): """ _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "public_key_data": {"key": "publicKeyData", "type": "str"}, + "private_key_data": {"key": "privateKeyData", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword username: Username of admin account. :paramtype username: str @@ -34770,10 +32849,10 @@ def __init__( :paramtype private_key_data: str """ super(VirtualMachineSshCredentials, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.public_key_data = kwargs.get('public_key_data', None) - self.private_key_data = kwargs.get('private_key_data', None) + self.username = kwargs.get("username", None) + self.password = kwargs.get("password", None) + self.public_key_data = kwargs.get("public_key_data", None) + self.private_key_data = kwargs.get("private_key_data", None) class VolumeDefinition(msrest.serialization.Model): @@ -34799,20 +32878,17 @@ class VolumeDefinition(msrest.serialization.Model): """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'read_only': {'key': 'readOnly', 'type': 'bool'}, - 'source': {'key': 'source', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'consistency': {'key': 'consistency', 'type': 'str'}, - 'bind': {'key': 
'bind', 'type': 'BindOptions'}, - 'volume': {'key': 'volume', 'type': 'VolumeOptions'}, - 'tmpfs': {'key': 'tmpfs', 'type': 'TmpfsOptions'}, + "type": {"key": "type", "type": "str"}, + "read_only": {"key": "readOnly", "type": "bool"}, + "source": {"key": "source", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "consistency": {"key": "consistency", "type": "str"}, + "bind": {"key": "bind", "type": "BindOptions"}, + "volume": {"key": "volume", "type": "VolumeOptions"}, + "tmpfs": {"key": "tmpfs", "type": "TmpfsOptions"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Possible values include: "bind", "volume", "tmpfs", "npipe". Default value: "bind". @@ -34834,14 +32910,14 @@ def __init__( :paramtype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions """ super(VolumeDefinition, self).__init__(**kwargs) - self.type = kwargs.get('type', "bind") - self.read_only = kwargs.get('read_only', None) - self.source = kwargs.get('source', None) - self.target = kwargs.get('target', None) - self.consistency = kwargs.get('consistency', None) - self.bind = kwargs.get('bind', None) - self.volume = kwargs.get('volume', None) - self.tmpfs = kwargs.get('tmpfs', None) + self.type = kwargs.get("type", "bind") + self.read_only = kwargs.get("read_only", None) + self.source = kwargs.get("source", None) + self.target = kwargs.get("target", None) + self.consistency = kwargs.get("consistency", None) + self.bind = kwargs.get("bind", None) + self.volume = kwargs.get("volume", None) + self.tmpfs = kwargs.get("tmpfs", None) class VolumeOptions(msrest.serialization.Model): @@ -34852,19 +32928,16 @@ class VolumeOptions(msrest.serialization.Model): """ _attribute_map = { - 'nocopy': {'key': 'nocopy', 'type': 'bool'}, + "nocopy": {"key": "nocopy", "type": "bool"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword nocopy: 
Indicate whether volume is nocopy. :paramtype nocopy: bool """ super(VolumeOptions, self).__init__(**kwargs) - self.nocopy = kwargs.get('nocopy', None) + self.nocopy = kwargs.get("nocopy", None) class Workspace(Resource): @@ -35000,81 +33073,86 @@ class Workspace(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'ml_flow_tracking_uri': {'readonly': True}, - 'notebook_info': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'storage_hns_enabled': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'workspace_id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'associated_workspaces': {'key': 'properties.associatedWorkspaces', 'type': '[str]'}, - 'container_registries': {'key': 'properties.containerRegistries', 'type': '[str]'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'enable_software_bill_of_materials': 
{'key': 'properties.enableSoftwareBillOfMaterials', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, - 'existing_workspaces': {'key': 'properties.existingWorkspaces', 'type': '[str]'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'hub_resource_id': {'key': 'properties.hubResourceId', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'ip_allowlist': {'key': 'properties.ipAllowlist', 'type': '[str]'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'key_vaults': {'key': 'properties.keyVaults', 'type': '[str]'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', - 'type': '[PrivateEndpointConnection]'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'serverless_compute_settings': {'key': 'properties.serverlessComputeSettings', - 'type': 'ServerlessComputeSettings'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', - 'type': 'ServiceManagedResourcesSettings'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'shared_private_link_resources': {'key': 
'properties.sharedPrivateLinkResources', - 'type': '[SharedPrivateLinkResource]'}, - 'soft_delete_retention_in_days': {'key': 'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[str]'}, - 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'}, - 'system_datastores_auth_mode': {'key': 'properties.systemDatastoresAuthMode', 'type': 'str'}, - 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, - 'workspace_hub_config': {'key': 'properties.workspaceHubConfig', 'type': 'WorkspaceHubConfig'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "ml_flow_tracking_uri": {"readonly": True}, + "notebook_info": {"readonly": True}, + "private_endpoint_connections": {"readonly": True}, + "private_link_count": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "service_provisioned_resource_group": {"readonly": True}, + "storage_hns_enabled": {"readonly": True}, + "tenant_id": {"readonly": True}, + "workspace_id": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "allow_public_access_when_behind_vnet": {"key": "properties.allowPublicAccessWhenBehindVnet", "type": "bool"}, + "application_insights": {"key": 
"properties.applicationInsights", "type": "str"}, + "associated_workspaces": {"key": "properties.associatedWorkspaces", "type": "[str]"}, + "container_registries": {"key": "properties.containerRegistries", "type": "[str]"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, + "enable_software_bill_of_materials": {"key": "properties.enableSoftwareBillOfMaterials", "type": "bool"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionProperty"}, + "existing_workspaces": {"key": "properties.existingWorkspaces", "type": "[str]"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "hbi_workspace": {"key": "properties.hbiWorkspace", "type": "bool"}, + "hub_resource_id": {"key": "properties.hubResourceId", "type": "str"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "ip_allowlist": {"key": "properties.ipAllowlist", "type": "[str]"}, + "key_vault": {"key": "properties.keyVault", "type": "str"}, + "key_vaults": {"key": "properties.keyVaults", "type": "[str]"}, + "managed_network": {"key": "properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "ml_flow_tracking_uri": {"key": "properties.mlFlowTrackingUri", "type": "str"}, + "notebook_info": {"key": "properties.notebookInfo", "type": "NotebookResourceInfo"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "private_endpoint_connections": { + "key": "properties.privateEndpointConnections", + "type": "[PrivateEndpointConnection]", + }, + "private_link_count": {"key": "properties.privateLinkCount", "type": "int"}, + "provisioning_state": 
{"key": "properties.provisioningState", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "serverless_compute_settings": { + "key": "properties.serverlessComputeSettings", + "type": "ServerlessComputeSettings", + }, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "service_provisioned_resource_group": {"key": "properties.serviceProvisionedResourceGroup", "type": "str"}, + "shared_private_link_resources": { + "key": "properties.sharedPrivateLinkResources", + "type": "[SharedPrivateLinkResource]", + }, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "storage_account": {"key": "properties.storageAccount", "type": "str"}, + "storage_accounts": {"key": "properties.storageAccounts", "type": "[str]"}, + "storage_hns_enabled": {"key": "properties.storageHnsEnabled", "type": "bool"}, + "system_datastores_auth_mode": {"key": "properties.systemDatastoresAuthMode", "type": "str"}, + "tenant_id": {"key": "properties.tenantId", "type": "str"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": "bool"}, + "workspace_hub_config": {"key": "properties.workspaceHubConfig", "type": "WorkspaceHubConfig"}, + "workspace_id": {"key": "properties.workspaceId", "type": "str"}, + } + + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -35169,50 +33247,50 @@ def __init__( :paramtype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig """ super(Workspace, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', None) - self.application_insights = kwargs.get('application_insights', None) - self.associated_workspaces = kwargs.get('associated_workspaces', None) - self.container_registries = kwargs.get('container_registries', None) - self.container_registry = kwargs.get('container_registry', None) - self.description = kwargs.get('description', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.enable_data_isolation = kwargs.get('enable_data_isolation', None) - self.enable_software_bill_of_materials = kwargs.get('enable_software_bill_of_materials', None) - self.encryption = kwargs.get('encryption', None) - self.existing_workspaces = kwargs.get('existing_workspaces', None) - self.feature_store_settings = kwargs.get('feature_store_settings', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.hbi_workspace = kwargs.get('hbi_workspace', None) - self.hub_resource_id = kwargs.get('hub_resource_id', None) - self.image_build_compute = kwargs.get('image_build_compute', None) - self.ip_allowlist = kwargs.get('ip_allowlist', None) - self.key_vault = kwargs.get('key_vault', None) - self.key_vaults = kwargs.get('key_vaults', None) - self.managed_network = kwargs.get('managed_network', None) + self.identity = kwargs.get("identity", None) + self.kind = kwargs.get("kind", None) + self.location = kwargs.get("location", None) + self.sku = kwargs.get("sku", None) + self.tags = kwargs.get("tags", 
None) + self.allow_public_access_when_behind_vnet = kwargs.get("allow_public_access_when_behind_vnet", None) + self.application_insights = kwargs.get("application_insights", None) + self.associated_workspaces = kwargs.get("associated_workspaces", None) + self.container_registries = kwargs.get("container_registries", None) + self.container_registry = kwargs.get("container_registry", None) + self.description = kwargs.get("description", None) + self.discovery_url = kwargs.get("discovery_url", None) + self.enable_data_isolation = kwargs.get("enable_data_isolation", None) + self.enable_software_bill_of_materials = kwargs.get("enable_software_bill_of_materials", None) + self.encryption = kwargs.get("encryption", None) + self.existing_workspaces = kwargs.get("existing_workspaces", None) + self.feature_store_settings = kwargs.get("feature_store_settings", None) + self.friendly_name = kwargs.get("friendly_name", None) + self.hbi_workspace = kwargs.get("hbi_workspace", None) + self.hub_resource_id = kwargs.get("hub_resource_id", None) + self.image_build_compute = kwargs.get("image_build_compute", None) + self.ip_allowlist = kwargs.get("ip_allowlist", None) + self.key_vault = kwargs.get("key_vault", None) + self.key_vaults = kwargs.get("key_vaults", None) + self.managed_network = kwargs.get("managed_network", None) self.ml_flow_tracking_uri = None self.notebook_info = None - self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) + self.primary_user_assigned_identity = kwargs.get("primary_user_assigned_identity", None) self.private_endpoint_connections = None self.private_link_count = None self.provisioning_state = None - self.public_network_access = kwargs.get('public_network_access', None) - self.serverless_compute_settings = kwargs.get('serverless_compute_settings', None) - self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) + self.public_network_access = kwargs.get("public_network_access", None) 
+ self.serverless_compute_settings = kwargs.get("serverless_compute_settings", None) + self.service_managed_resources_settings = kwargs.get("service_managed_resources_settings", None) self.service_provisioned_resource_group = None - self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) - self.soft_delete_retention_in_days = kwargs.get('soft_delete_retention_in_days', None) - self.storage_account = kwargs.get('storage_account', None) - self.storage_accounts = kwargs.get('storage_accounts', None) + self.shared_private_link_resources = kwargs.get("shared_private_link_resources", None) + self.soft_delete_retention_in_days = kwargs.get("soft_delete_retention_in_days", None) + self.storage_account = kwargs.get("storage_account", None) + self.storage_accounts = kwargs.get("storage_accounts", None) self.storage_hns_enabled = None - self.system_datastores_auth_mode = kwargs.get('system_datastores_auth_mode', None) + self.system_datastores_auth_mode = kwargs.get("system_datastores_auth_mode", None) self.tenant_id = None - self.v1_legacy_mode = kwargs.get('v1_legacy_mode', None) - self.workspace_hub_config = kwargs.get('workspace_hub_config', None) + self.v1_legacy_mode = kwargs.get("v1_legacy_mode", None) + self.workspace_hub_config = kwargs.get("workspace_hub_config", None) self.workspace_id = None @@ -35226,14 +33304,11 @@ class WorkspaceConnectionAccessKey(msrest.serialization.Model): """ _attribute_map = { - 'access_key_id': {'key': 'accessKeyId', 'type': 'str'}, - 'secret_access_key': {'key': 'secretAccessKey', 'type': 'str'}, + "access_key_id": {"key": "accessKeyId", "type": "str"}, + "secret_access_key": {"key": "secretAccessKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword access_key_id: :paramtype access_key_id: str @@ -35241,8 +33316,8 @@ def __init__( :paramtype secret_access_key: str """ super(WorkspaceConnectionAccessKey, self).__init__(**kwargs) - self.access_key_id = 
kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) + self.access_key_id = kwargs.get("access_key_id", None) + self.secret_access_key = kwargs.get("secret_access_key", None) class WorkspaceConnectionApiKey(msrest.serialization.Model): @@ -35253,19 +33328,16 @@ class WorkspaceConnectionApiKey(msrest.serialization.Model): """ _attribute_map = { - 'key': {'key': 'key', 'type': 'str'}, + "key": {"key": "key", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword key: :paramtype key: str """ super(WorkspaceConnectionApiKey, self).__init__(**kwargs) - self.key = kwargs.get('key', None) + self.key = kwargs.get("key", None) class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): @@ -35278,14 +33350,11 @@ class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword client_id: :paramtype client_id: str @@ -35293,50 +33362,47 @@ def __init__( :paramtype resource_id: str """ super(WorkspaceConnectionManagedIdentity, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.resource_id = kwargs.get('resource_id', None) + self.client_id = kwargs.get("client_id", None) + self.resource_id = kwargs.get("resource_id", None) class WorkspaceConnectionOAuth2(msrest.serialization.Model): """ClientId and ClientSecret are required. Other properties are optional -depending on each OAuth2 provider's implementation. - - :ivar auth_url: Required by Concur connection category. - :vartype auth_url: str - :ivar client_id: Client id in the format of UUID. 
- :vartype client_id: str - :ivar client_secret: - :vartype client_secret: str - :ivar developer_token: Required by GoogleAdWords connection category. - :vartype developer_token: str - :ivar password: - :vartype password: str - :ivar refresh_token: Required by GoogleBigQuery, GoogleAdWords, Hubspot, QuickBooks, Square, - Xero, Zoho - where user needs to get RefreshToken offline. - :vartype refresh_token: str - :ivar tenant_id: Required by QuickBooks and Xero connection categories. - :vartype tenant_id: str - :ivar username: Concur, ServiceNow auth server AccessToken grant type is 'Password' - which requires UsernamePassword. - :vartype username: str + depending on each OAuth2 provider's implementation. + + :ivar auth_url: Required by Concur connection category. + :vartype auth_url: str + :ivar client_id: Client id in the format of UUID. + :vartype client_id: str + :ivar client_secret: + :vartype client_secret: str + :ivar developer_token: Required by GoogleAdWords connection category. + :vartype developer_token: str + :ivar password: + :vartype password: str + :ivar refresh_token: Required by GoogleBigQuery, GoogleAdWords, Hubspot, QuickBooks, Square, + Xero, Zoho + where user needs to get RefreshToken offline. + :vartype refresh_token: str + :ivar tenant_id: Required by QuickBooks and Xero connection categories. + :vartype tenant_id: str + :ivar username: Concur, ServiceNow auth server AccessToken grant type is 'Password' + which requires UsernamePassword. 
+ :vartype username: str """ _attribute_map = { - 'auth_url': {'key': 'authUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - 'developer_token': {'key': 'developerToken', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, + "auth_url": {"key": "authUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "developer_token": {"key": "developerToken", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "username": {"key": "username", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword auth_url: Required by Concur connection category. 
:paramtype auth_url: str @@ -35359,14 +33425,14 @@ def __init__( :paramtype username: str """ super(WorkspaceConnectionOAuth2, self).__init__(**kwargs) - self.auth_url = kwargs.get('auth_url', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.developer_token = kwargs.get('developer_token', None) - self.password = kwargs.get('password', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.tenant_id = kwargs.get('tenant_id', None) - self.username = kwargs.get('username', None) + self.auth_url = kwargs.get("auth_url", None) + self.client_id = kwargs.get("client_id", None) + self.client_secret = kwargs.get("client_secret", None) + self.developer_token = kwargs.get("developer_token", None) + self.password = kwargs.get("password", None) + self.refresh_token = kwargs.get("refresh_token", None) + self.tenant_id = kwargs.get("tenant_id", None) + self.username = kwargs.get("username", None) class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): @@ -35377,19 +33443,16 @@ class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): """ _attribute_map = { - 'pat': {'key': 'pat', 'type': 'str'}, + "pat": {"key": "pat", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword pat: :paramtype pat: str """ super(WorkspaceConnectionPersonalAccessToken, self).__init__(**kwargs) - self.pat = kwargs.get('pat', None) + self.pat = kwargs.get("pat", None) class WorkspaceConnectionPropertiesV2BasicResource(Resource): @@ -35415,32 +33478,29 @@ class WorkspaceConnectionPropertiesV2BasicResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": 
{"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ super(WorkspaceConnectionPropertiesV2BasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + self.properties = kwargs["properties"] class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.serialization.Model): @@ -35454,14 +33514,11 @@ class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.seri """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[WorkspaceConnectionPropertiesV2BasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[WorkspaceConnectionPropertiesV2BasicResource]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: :paramtype next_link: str @@ -35470,8 +33527,8 @@ def __init__( list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] """ super(WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class 
WorkspaceConnectionServicePrincipal(msrest.serialization.Model): @@ -35486,15 +33543,12 @@ class WorkspaceConnectionServicePrincipal(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword client_id: :paramtype client_id: str @@ -35504,9 +33558,9 @@ def __init__( :paramtype tenant_id: str """ super(WorkspaceConnectionServicePrincipal, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.tenant_id = kwargs.get('tenant_id', None) + self.client_id = kwargs.get("client_id", None) + self.client_secret = kwargs.get("client_secret", None) + self.tenant_id = kwargs.get("tenant_id", None) class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): @@ -35517,19 +33571,16 @@ class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): """ _attribute_map = { - 'sas': {'key': 'sas', 'type': 'str'}, + "sas": {"key": "sas", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword sas: :paramtype sas: str """ super(WorkspaceConnectionSharedAccessSignature, self).__init__(**kwargs) - self.sas = kwargs.get('sas', None) + self.sas = kwargs.get("sas", None) class WorkspaceConnectionUpdateParameter(msrest.serialization.Model): @@ -35541,13 +33592,10 @@ class WorkspaceConnectionUpdateParameter(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, } - def __init__( - self, 
- **kwargs - ): + def __init__(self, **kwargs): """ :keyword properties: The properties that the machine learning workspace connection will be updated with. @@ -35555,7 +33603,7 @@ def __init__( ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ super(WorkspaceConnectionUpdateParameter, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + self.properties = kwargs.get("properties", None) class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): @@ -35571,15 +33619,12 @@ class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): """ _attribute_map = { - 'password': {'key': 'password', 'type': 'str'}, - 'security_token': {'key': 'securityToken', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, + "password": {"key": "password", "type": "str"}, + "security_token": {"key": "securityToken", "type": "str"}, + "username": {"key": "username", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword password: :paramtype password: str @@ -35590,9 +33635,9 @@ def __init__( :paramtype username: str """ super(WorkspaceConnectionUsernamePassword, self).__init__(**kwargs) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.username = kwargs.get('username', None) + self.password = kwargs.get("password", None) + self.security_token = kwargs.get("security_token", None) + self.username = kwargs.get("username", None) class WorkspaceHubConfig(msrest.serialization.Model): @@ -35605,14 +33650,11 @@ class WorkspaceHubConfig(msrest.serialization.Model): """ _attribute_map = { - 'additional_workspace_storage_accounts': {'key': 'additionalWorkspaceStorageAccounts', 'type': '[str]'}, - 'default_workspace_resource_group': {'key': 'defaultWorkspaceResourceGroup', 'type': 'str'}, + "additional_workspace_storage_accounts": {"key": "additionalWorkspaceStorageAccounts", "type": "[str]"}, + 
"default_workspace_resource_group": {"key": "defaultWorkspaceResourceGroup", "type": "str"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword additional_workspace_storage_accounts: :paramtype additional_workspace_storage_accounts: list[str] @@ -35620,8 +33662,8 @@ def __init__( :paramtype default_workspace_resource_group: str """ super(WorkspaceHubConfig, self).__init__(**kwargs) - self.additional_workspace_storage_accounts = kwargs.get('additional_workspace_storage_accounts', None) - self.default_workspace_resource_group = kwargs.get('default_workspace_resource_group', None) + self.additional_workspace_storage_accounts = kwargs.get("additional_workspace_storage_accounts", None) + self.default_workspace_resource_group = kwargs.get("default_workspace_resource_group", None) class WorkspaceListResult(msrest.serialization.Model): @@ -35636,14 +33678,11 @@ class WorkspaceListResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Workspace]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Workspace]"}, } - def __init__( - self, - **kwargs - ): + def __init__(self, **kwargs): """ :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. 
@@ -35653,8 +33692,8 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] """ super(WorkspaceListResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) + self.next_link = kwargs.get("next_link", None) + self.value = kwargs.get("value", None) class WorkspacePrivateEndpointResource(msrest.serialization.Model): @@ -35670,21 +33709,17 @@ class WorkspacePrivateEndpointResource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'subnet_arm_id': {'readonly': True}, + "id": {"readonly": True}, + "subnet_arm_id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ + def __init__(self, **kwargs): + """ """ super(WorkspacePrivateEndpointResource, self).__init__(**kwargs) self.id = None self.subnet_arm_id = None @@ -35745,34 +33780,35 @@ class WorkspaceUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'enable_software_bill_of_materials': {'key': 'properties.enableSoftwareBillOfMaterials', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionUpdateProperties'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 
'properties.friendlyName', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'ip_allowlist': {'key': 'properties.ipAllowlist', 'type': '[str]'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'serverless_compute_settings': {'key': 'properties.serverlessComputeSettings', - 'type': 'ServerlessComputeSettings'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', - 'type': 'ServiceManagedResourcesSettings'}, - 'soft_delete_retention_in_days': {'key': 'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, + "enable_software_bill_of_materials": {"key": "properties.enableSoftwareBillOfMaterials", "type": "bool"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionUpdateProperties"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "ip_allowlist": {"key": "properties.ipAllowlist", "type": "[str]"}, + "managed_network": {"key": 
"properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "serverless_compute_settings": { + "key": "properties.serverlessComputeSettings", + "type": "ServerlessComputeSettings", + }, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": "bool"}, + } + + def __init__(self, **kwargs): """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -35827,23 +33863,23 @@ def __init__( :paramtype v1_legacy_mode: bool """ super(WorkspaceUpdateParameters, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.description = kwargs.get('description', None) - self.enable_data_isolation = kwargs.get('enable_data_isolation', None) - self.enable_software_bill_of_materials = kwargs.get('enable_software_bill_of_materials', None) - self.encryption = kwargs.get('encryption', None) - self.feature_store_settings = kwargs.get('feature_store_settings', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.image_build_compute = kwargs.get('image_build_compute', None) - self.ip_allowlist = kwargs.get('ip_allowlist', None) - self.managed_network = kwargs.get('managed_network', None) - self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) - 
self.public_network_access = kwargs.get('public_network_access', None) - self.serverless_compute_settings = kwargs.get('serverless_compute_settings', None) - self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) - self.soft_delete_retention_in_days = kwargs.get('soft_delete_retention_in_days', None) - self.v1_legacy_mode = kwargs.get('v1_legacy_mode', None) + self.identity = kwargs.get("identity", None) + self.sku = kwargs.get("sku", None) + self.tags = kwargs.get("tags", None) + self.application_insights = kwargs.get("application_insights", None) + self.container_registry = kwargs.get("container_registry", None) + self.description = kwargs.get("description", None) + self.enable_data_isolation = kwargs.get("enable_data_isolation", None) + self.enable_software_bill_of_materials = kwargs.get("enable_software_bill_of_materials", None) + self.encryption = kwargs.get("encryption", None) + self.feature_store_settings = kwargs.get("feature_store_settings", None) + self.friendly_name = kwargs.get("friendly_name", None) + self.image_build_compute = kwargs.get("image_build_compute", None) + self.ip_allowlist = kwargs.get("ip_allowlist", None) + self.managed_network = kwargs.get("managed_network", None) + self.primary_user_assigned_identity = kwargs.get("primary_user_assigned_identity", None) + self.public_network_access = kwargs.get("public_network_access", None) + self.serverless_compute_settings = kwargs.get("serverless_compute_settings", None) + self.service_managed_resources_settings = kwargs.get("service_managed_resources_settings", None) + self.soft_delete_retention_in_days = kwargs.get("soft_delete_retention_in_days", None) + self.v1_legacy_mode = kwargs.get("v1_legacy_mode", None) diff --git a/src/promptflow/promptflow/azure/_models/_version.py b/src/promptflow/promptflow/core/_connection_provider/_models/_version.py similarity index 100% rename from src/promptflow/promptflow/azure/_models/_version.py rename to 
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow.core._errors import MissingRequiredPackage
from promptflow.exceptions import ValidationException


def _check_required_packages():
    """Fail fast when the optional azure packages needed for workspace connections are absent.

    :raises MissingRequiredPackage: if azure-ai-ml or azure-identity cannot be imported.
    """
    try:
        import azure.ai.ml  # noqa: F401
        import azure.identity  # noqa: F401
    except ImportError as e:
        raise MissingRequiredPackage(
            message="Please install 'azure-identity>=1.12.0,<2.0.0' and 'azure-ai-ml' to use workspace connection."
        ) from e


def get_arm_token(credential) -> str:
    """Fetch an ARM access token for the current cloud's management endpoint.

    :param credential: an azure-identity credential object (e.g. DefaultAzureCredential) — TODO confirm expected type.
    :return: the raw bearer token string.
    """
    _check_required_packages()
    # Resolve the ARM base url for the current cloud (public/sovereign) from azure-ai-ml metadata.
    from azure.ai.ml._azure_environments import _get_base_url_from_metadata

    resource = _get_base_url_from_metadata()
    return get_token(credential, resource)


def get_token(credential, resource) -> str:
    """Fetch a token scoped to *resource* and validate its audience claim.

    :param credential: an azure-identity credential object.
    :param resource: the resource url the token should be scoped to.
    :return: the raw bearer token string.
    :raises ValidationException: if the fetched token's ``aud`` claim does not equal *resource*.
    """
    _check_required_packages()
    from azure.ai.ml._azure_environments import _resource_to_scopes

    azure_ml_scopes = _resource_to_scopes(resource)
    token = credential.get_token(*azure_ml_scopes).token
    # validate token has aml audience
    # Decode WITHOUT signature/audience verification -- we only inspect the "aud"
    # claim locally; the service performs real validation.
    import jwt  # Included by azure-identity

    decoded_token = jwt.decode(
        token,
        options={"verify_signature": False, "verify_aud": False},
    )
    if decoded_token.get("aud") != resource:
        msg = """AAD token with aml scope could not be fetched using the credentials being used.
        Please validate if token with {0} scope can be fetched using credentials provided to PFClient.
        Token with {0} scope can be fetched using credentials.get_token({0})
        """
        raise ValidationException(
            message=msg.format(*azure_ml_scopes),
        )

    return token
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from typing import Any, Optional, Union

import requests

from promptflow._constants import ConnectionAuthMode
from promptflow._utils.retry_utils import http_retry_wrapper
from promptflow.core._connection import CustomConnection, _Connection
from promptflow.core._errors import (
    AccessDeniedError,
    BuildConnectionError,
    MissingRequiredPackage,
    OpenURLFailed,
    OpenURLFailedUserError,
    OpenURLUserAuthenticationError,
    UnknownConnectionType,
    UnsupportedConnectionAuthType,
)
from promptflow.exceptions import ErrorTarget, SystemErrorException, UserErrorException

from ._connection_provider import ConnectionProvider

# ARM REST routes: fetch one connection including its secrets, and list all connections.
GET_CONNECTION_URL = (
    "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices"
    "/workspaces/{ws}/connections/{name}/listsecrets?api-version=2023-04-01-preview"
)
LIST_CONNECTION_URL = (
    "/subscriptions/{sub}/resourcegroups/{rg}/providers/Microsoft.MachineLearningServices"
    "/workspaces/{ws}/connections?api-version=2023-04-01-preview"
)
# Metadata keys written by PromptFlow are namespaced with this prefix.
FLOW_META_PREFIX = "azureml.flow."


# Note: We define the category and auth type here because newly added enum values may
# depend on azure-ai-ml package update, which is not in our control.
class ConnectionCategory:
    AzureOpenAI = "AzureOpenAI"
    CognitiveSearch = "CognitiveSearch"
    CognitiveService = "CognitiveService"
    CustomKeys = "CustomKeys"
    OpenAI = "OpenAI"
    Serp = "Serp"
    Serverless = "Serverless"
    BingLLMSearch = "BingLLMSearch"


class ConnectionAuthType:
    ApiKey = "ApiKey"
    AAD = "AAD"


def get_case_insensitive_key(d, key, default=None):
    """Return the value in *d* whose key matches *key* case-insensitively, else *default*."""
    for k, v in d.items():
        if k.lower() == key.lower():
            return v
    return default


class WorkspaceConnectionProvider(ConnectionProvider):
    """Connection provider that resolves connections stored in an Azure ML workspace via ARM."""

    def __init__(
        self,
        subscription_id: Optional[str] = None,
        resource_group_name: Optional[str] = None,
        workspace_name: Optional[str] = None,
        credential=None,
    ):
        self.credential = credential
        self.subscription_id = subscription_id
        self.resource_group_name = resource_group_name
        self.workspace_name = workspace_name

    @classmethod
    def open_url(cls, token, url, action, host="management.azure.com", method="GET", model=None) -> Union[Any, dict]:
        """Issue an authenticated ARM request and return the (optionally deserialized) JSON body.

        :type token: str
        :type url: str
        :type action: str, for the error message format.
        :type host: str
        :type method: str
        :type model: Type[msrest.serialization.Model]
        :raises AccessDeniedError: on HTTP 403.
        :raises OpenURLFailedUserError: on other 4xx statuses (caller's input is at fault).
        :raises OpenURLFailed: on any other non-200 status (service-side failure).
        """
        headers = {"Authorization": f"Bearer {token}"}
        # Retry transient HTTP failures before giving up.
        response = http_retry_wrapper(requests.request)(method, f"https://{host}{url}", headers=headers)
        # {url}/{reason} are filled in by the exception machinery; status/action are known now.
        message_format = (
            f"Open url {{url}} failed with status code: {response.status_code}, action: {action}, reason: {{reason}}"
        )
        if response.status_code == 403:
            # Fix: this module lives in promptflow.core, so target CORE for consistency with
            # every other error raised here; RUNTIME was a leftover from the runtime copy
            # of this code (ArmConnectionOperations).
            raise AccessDeniedError(operation=url, target=ErrorTarget.CORE)
        elif 400 <= response.status_code < 500:
            raise OpenURLFailedUserError(
                message_format=message_format,
                url=url,
                reason=response.reason,
            )
        elif response.status_code != 200:
            raise OpenURLFailed(
                message_format=message_format,
                url=url,
                reason=response.reason,
            )
        data = response.json()
        if model:
            return model.deserialize(data)
        return data

    @classmethod
    def validate_and_fallback_connection_type(cls, name, type_name, category, metadata):
        """Resolve the PromptFlow connection type name for a workspace connection.

        Connections created via PromptFlow carry the type in metadata (*type_name*); for
        connections created through other channels, fall back based on category/metadata.

        :raises UnknownConnectionType: when no PromptFlow type can be determined.
        """
        if type_name:
            return type_name
        # Below category has corresponding connection type in PromptFlow, so we can fall back directly.
        # Note: CustomKeys may store different connection types for now, e.g. openai, serp.
        if category in [
            ConnectionCategory.AzureOpenAI,
            ConnectionCategory.CognitiveSearch,
            ConnectionCategory.Serverless,
        ]:
            return category
        if category == ConnectionCategory.CognitiveService:
            kind = get_case_insensitive_key(metadata, "Kind")
            if kind == "Content Safety":
                return "AzureContentSafety"
            if kind == "Form Recognizer":
                return "FormRecognizer"
        raise UnknownConnectionType(
            message_format="Connection {name} is not recognized in PromptFlow, "
            "please make sure the connection is created in PromptFlow.",
            category=category,
            name=name,
        )

    @classmethod
    def build_connection_dict_from_rest_object(cls, name, obj) -> dict:
        """Convert a workspace connection REST object into an execution connection dict.

        :type name: str
        :type obj: azure.ai.ml._restclient.v2023_06_01_preview.models.WorkspaceConnectionPropertiesV2BasicResource
        :raises UnsupportedConnectionAuthType: for auth types other than ApiKey/AAD.
        :raises UnknownConnectionType: for categories this SDK version does not know.
        """
        # Reference 1: https://msdata.visualstudio.com/Vienna/_git/vienna?path=/src/azureml-api/src/AccountRP/Contracts/WorkspaceConnection/WorkspaceConnectionDtoV2.cs&_a=blame&version=GBmaster # noqa: E501
        # Reference 2: https://msdata.visualstudio.com/Vienna/_git/vienna?path=%2Fsrc%2Fazureml-api%2Fsrc%2FDesigner%2Fsrc%2FMiddleTier%2FMiddleTier%2FServices%2FPromptFlow%2FConnectionsManagement.cs&version=GBmaster&_a=contents # noqa: E501
        # This connection type covers the generic ApiKey auth connection categories, for examples:
        # AzureOpenAI:
        #     Category:= AzureOpenAI
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {ApiKey} as <see cref="ApiKey"/>
        #     Target:= {ApiBase}
        #
        # CognitiveService:
        #     Category:= CognitiveService
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {SubscriptionKey} as <see cref="ApiKey"/>
        #     Target:= ServiceRegion={serviceRegion}
        #
        # CognitiveSearch:
        #     Category:= CognitiveSearch
        #     AuthType:= ApiKey (as type discriminator)
        #     Credentials:= {Key} as <see cref="ApiKey"/>
        #     Target:= {Endpoint}
        #
        # Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields
        properties = obj.properties
        type_name = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}connection_type")
        type_name = cls.validate_and_fallback_connection_type(name, type_name, properties.category, properties.metadata)
        module = get_case_insensitive_key(properties.metadata, f"{FLOW_META_PREFIX}module", "promptflow.connections")
        # Note: Category is connectionType in MT, but type name should be class name, which is flowValueType in MT.
        # Handle old connections here, see details: https://github.com/Azure/promptflow/tree/main/connections
        type_name = f"{type_name}Connection" if not type_name.endswith("Connection") else type_name
        meta = {"type": type_name, "module": module}

        def get_auth_config(props):
            # Map ARM auth type to the (api_key, auth_mode) pair PromptFlow expects.
            unsupported_message = "Unsupported connection auth type %r, supported types are 'ApiKey' and 'AAD'."
            if not isinstance(props.auth_type, str):
                raise UnsupportedConnectionAuthType(message=unsupported_message % props.auth_type)
            if props.auth_type.lower() == ConnectionAuthType.ApiKey.lower():
                return {"api_key": props.credentials.key, "auth_mode": ConnectionAuthMode.KEY}
            elif props.auth_type.lower() == ConnectionAuthType.AAD.lower():
                return {"api_key": None, "auth_mode": ConnectionAuthMode.MEID_TOKEN}
            raise UnsupportedConnectionAuthType(message=unsupported_message % props.auth_type)

        if properties.category == ConnectionCategory.AzureOpenAI:
            value = {
                **get_auth_config(properties),
                "api_base": properties.target,
                "api_type": get_case_insensitive_key(properties.metadata, "ApiType"),
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
            # Note: Resource id is required in some cloud scenario, which is not exposed on sdk/cli entity.
            resource_id = get_case_insensitive_key(properties.metadata, "ResourceId")
            if resource_id:
                value["resource_id"] = resource_id
        elif properties.category == ConnectionCategory.CognitiveSearch:
            value = {
                **get_auth_config(properties),
                "api_base": properties.target,
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
        elif properties.category == ConnectionCategory.Serverless:
            value = {
                **get_auth_config(properties),
                "api_base": properties.target,
            }
        elif properties.category == ConnectionCategory.CognitiveService:
            value = {
                **get_auth_config(properties),
                "endpoint": properties.target,
                "api_version": get_case_insensitive_key(properties.metadata, "ApiVersion"),
            }
        elif properties.category == ConnectionCategory.CustomKeys:
            # Merge secrets from credentials.keys and other string fields from metadata
            value = {
                **properties.credentials.keys,
                **{k: v for k, v in properties.metadata.items() if not k.startswith(FLOW_META_PREFIX)},
            }
            if type_name == CustomConnection.__name__:
                meta["secret_keys"] = list(properties.credentials.keys.keys())
        else:
            raise UnknownConnectionType(
                message_format=(
                    "Unknown connection {name} category {category}, "
                    "please upgrade your promptflow sdk version and retry."
                ),
                category=properties.category,
                name=name,
            )
        # Note: Filter empty values out to ensure default values can be picked when init class object.
        return {**meta, "value": {k: v for k, v in value.items() if v}}

    @classmethod
    def _build_connection_dict(cls, name, subscription_id, resource_group_name, workspace_name, credential) -> dict:
        """Fetch a connection (with secrets) from ARM and convert it to a connection dict.

        :type name: str
        :raises OpenURLUserAuthenticationError: when the credential lacks the secrets-reader role.
        :raises UserErrorException: when the credential itself fails to authenticate.
        :raises BuildConnectionError: when the REST object cannot be converted.
        """
        url = GET_CONNECTION_URL.format(
            sub=subscription_id,
            rg=resource_group_name,
            ws=workspace_name,
            name=name,
        )
        # Note: There is a try-catch in get arm token. It requires azure-ai-ml.
        # TODO: Remove the azure-ai-ml dependency.
        from ._utils import get_arm_token

        try:
            from azure.core.exceptions import ClientAuthenticationError

            from ._models import WorkspaceConnectionPropertiesV2BasicResource
        except ImportError as e:
            raise MissingRequiredPackage(
                message="Please install 'azure-identity>=1.12.0,<2.0.0' and 'msrest' to use workspace connection."
            ) from e
        try:
            rest_obj: WorkspaceConnectionPropertiesV2BasicResource = cls.open_url(
                get_arm_token(credential=credential),
                url=url,
                action="listsecrets",
                method="POST",
                model=WorkspaceConnectionPropertiesV2BasicResource,
            )
        except AccessDeniedError:
            auth_error_message = (
                "Access denied to list workspace secret due to invalid authentication. "
                "Please ensure you have gain RBAC role 'Azure Machine Learning Workspace Connection Secrets Reader' "
                "for current workspace, and wait for a few minutes to make sure the new role takes effect. "
            )
            raise OpenURLUserAuthenticationError(message=auth_error_message)
        except ClientAuthenticationError as e:
            raise UserErrorException(target=ErrorTarget.CORE, message=str(e), error=e)
        except Exception as e:
            raise SystemErrorException(target=ErrorTarget.CORE, message=str(e), error=e)

        try:
            return cls.build_connection_dict_from_rest_object(name, rest_obj)
        except Exception as e:
            raise BuildConnectionError(
                message_format=f"Build connection dict for connection {{name}} failed with {e}.",
                name=name,
            )

    @classmethod
    def _convert_to_connection_dict(cls, conn_name, conn_data):
        """Convert raw REST payload data (already fetched elsewhere) into a connection dict."""
        try:
            from ._models import WorkspaceConnectionPropertiesV2BasicResource
        except ImportError as e:
            raise MissingRequiredPackage(message="Please install 'msrest' to use workspace connection.") from e
        try:
            rest_obj = WorkspaceConnectionPropertiesV2BasicResource.deserialize(conn_data)
            conn_dict = cls.build_connection_dict_from_rest_object(conn_name, rest_obj)
            return conn_dict
        except Exception as e:
            raise BuildConnectionError(
                message_format=f"Build connection dict for connection {{name}} failed with {e}.",
                name=conn_name,
            )

    def get(self, name: str):
        """Get a connection entity by name from the configured workspace."""
        connection_dict = self._build_connection_dict(
            name,
            subscription_id=self.subscription_id,
            resource_group_name=self.resource_group_name,
            workspace_name=self.workspace_name,
            credential=self.credential,
        )
        return _Connection._from_execution_connection_dict(name=name, data=connection_dict)
class OpenURLFailed(SystemErrorException):
    """Raised when an ARM request fails with an unexpected (non-4xx, non-200) status."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class BuildConnectionError(SystemErrorException):
    """Raised when a connection REST object cannot be converted to a connection dict."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class MissingRequiredPackage(UserErrorException):
    """Raised when an optional dependency required by the requested feature is not installed."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class UserAuthenticationError(UserErrorException):
    """Exception raised when user authentication failed"""

    pass


class OpenURLUserAuthenticationError(UserAuthenticationError):
    """Raised when an ARM request fails because the caller's credential is not authorized."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class OpenURLFailedUserError(UserErrorException):
    """Raised when an ARM request fails with a 4xx status caused by the caller's input."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class UnknownConnectionType(UserErrorException):
    """Raised when a connection's type/category is not recognized by this SDK version."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class UnsupportedConnectionAuthType(UserErrorException):
    """Raised when a connection uses an auth type other than the supported ApiKey/AAD."""

    def __init__(self, **kwargs):
        super().__init__(target=ErrorTarget.CORE, **kwargs)


class AccessDeniedError(UserErrorException):
    """Exception raised when the caller lacks permission to perform an operation
    (e.g. listing workspace connection secrets)."""
    # NOTE(review): the original docstring ("run info can not be found in storage") was a
    # copy-paste from an unrelated error and did not describe this class.

    def __init__(self, operation: str, target: ErrorTarget):
        super().__init__(message=f"Access is denied to perform operation {operation!r}", target=target)
AccessDeniedError(UserErrorException): + """Exception raised when run info can not be found in storage""" + + def __init__(self, operation: str, target: ErrorTarget): + super().__init__(message=f"Access is denied to perform operation {operation!r}", target=target) diff --git a/src/promptflow/promptflow/core/_serving/extension/azureml_extension.py b/src/promptflow/promptflow/core/_serving/extension/azureml_extension.py index 25494061b31..45d7ad1ae1f 100644 --- a/src/promptflow/promptflow/core/_serving/extension/azureml_extension.py +++ b/src/promptflow/promptflow/core/_serving/extension/azureml_extension.py @@ -90,9 +90,11 @@ def get_override_connections(self, flow: Flow) -> Tuple[dict, dict]: if data_override: try: # try best to convert to connection, this is only for azureml deployment. - from promptflow.azure.operations._arm_connection_operations import ArmConnectionOperations + from promptflow.core._connection_provider._workspace_connection_provider import ( + WorkspaceConnectionProvider, + ) - conn = ArmConnectionOperations._convert_to_connection_dict(connection_name, conn_data) + conn = WorkspaceConnectionProvider._convert_to_connection_dict(connection_name, conn_data) connections[connection_name] = conn except Exception as e: self.logger.warn(f"Failed to convert connection data to connection: {e}") diff --git a/src/promptflow/tests/sdk_cli_azure_test/e2etests/test_arm_connection_operations.py b/src/promptflow/tests/sdk_cli_azure_test/e2etests/test_arm_connection_operations.py index 02867d29f9c..d4f1dea9bec 100644 --- a/src/promptflow/tests/sdk_cli_azure_test/e2etests/test_arm_connection_operations.py +++ b/src/promptflow/tests/sdk_cli_azure_test/e2etests/test_arm_connection_operations.py @@ -19,21 +19,11 @@ class TestArmConnectionOperations: def test_get_connection(self, connection_ops): # Note: Secrets will be returned by arm api result = connection_ops.get(name="azure_open_ai_connection") - assert ( - result._to_dict().items() - > { - "api_type": "azure", - 
def build_from_data_and_assert(data, expected):
    """Deserialize a raw REST payload and assert the provider builds the expected connection dict."""
    from promptflow.core._connection_provider._models._models import WorkspaceConnectionPropertiesV2BasicResource
    from promptflow.core._connection_provider._workspace_connection_provider import WorkspaceConnectionProvider

    # Work on a copy so callers can reuse their payload fixtures.
    payload = copy.deepcopy(data)
    rest_obj = WorkspaceConnectionPropertiesV2BasicResource.deserialize(payload)
    built = WorkspaceConnectionProvider.build_connection_dict_from_rest_object("mock", rest_obj)
    assert built == expected
omit = */promptflow/azure/_restclient/* */promptflow/azure/_models/* + */promptflow/core/_connection_provider/_models* + */promptflow/executor/* *__init__.py* + */promptflow/_sdk/_serving/* diff --git a/src/promptflow/tests/sdk_pfs_test/.coveragerc b/src/promptflow/tests/sdk_pfs_test/.coveragerc index 841a4d9503c..551f5583885 100644 --- a/src/promptflow/tests/sdk_pfs_test/.coveragerc +++ b/src/promptflow/tests/sdk_pfs_test/.coveragerc @@ -6,4 +6,6 @@ omit = */promptflow/azure/* */promptflow/entities/* */promptflow/operations/* + */promptflow/executor/* + */promptflow/core/* *__init__.py*