From 3e73647847c5689c1660a5620d0944447bbd9523 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 21:58:19 +0900 Subject: [PATCH 1/9] chore: Upgrade mypy to 1.14.1 --- tools/mypy-requirements.txt | 2 +- tools/mypy.lock | 43 ++++++++++++++++--------------------- 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/tools/mypy-requirements.txt b/tools/mypy-requirements.txt index 787ccca30c..13ef9a01af 100644 --- a/tools/mypy-requirements.txt +++ b/tools/mypy-requirements.txt @@ -1,2 +1,2 @@ -mypy==1.13.0 +mypy==1.14.1 pydantic~=2.9.2 diff --git a/tools/mypy.lock b/tools/mypy.lock index c6e52bf418..549d94bb44 100644 --- a/tools/mypy.lock +++ b/tools/mypy.lock @@ -9,7 +9,7 @@ // "CPython==3.12.8" // ], // "generated_with_requirements": [ -// "mypy==1.13.0", +// "mypy==1.14.1", // "pydantic~=2.9.2" // ], // "manylinux": "manylinux2014", @@ -53,58 +53,53 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a", - "url": "https://files.pythonhosted.org/packages/3b/86/72ce7f57431d87a7ff17d442f521146a6585019eb8f4f31b7c02801f78ad/mypy-1.13.0-py3-none-any.whl" + "hash": "b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", + "url": "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0", - "url": "https://files.pythonhosted.org/packages/1f/17/b1018c6bb3e9f1ce3956722b3bf91bff86c1cefccca71cec05eae49d6d41/mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl" + "hash": "fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", + "url": "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e", - "url": "https://files.pythonhosted.org/packages/83/67/b7419c6b503679d10bd26fc67529bc6a1f7a5f220bbb9f292dc10d33352f/mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl" + "hash": "553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", + "url": "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2", - "url": "https://files.pythonhosted.org/packages/ba/07/37d67048786ae84e6612575e173d713c9a05d0ae495dde1e68d972207d98/mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl" + "hash": "30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", + "url": "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e", - "url": "https://files.pythonhosted.org/packages/e8/21/7e9e523537991d145ab8a0a2fd98548d67646dc2aaaf6091c31ad883e7c1/mypy-1.13.0.tar.gz" + "hash": "cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", + "url": "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": 
"5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5", - "url": "https://files.pythonhosted.org/packages/fb/31/c526a7bd2e5c710ae47717c7a5f53f616db6d9097caf48ad650581e81748/mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl" + "hash": "8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", + "url": "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "1b723826c0ac659fe0ec6323323324f025d32a1966ba58e3a72c666ab23a6ede", - "url": "https://media.githubusercontent.com/media/lablup/backend.ai-oven/main/pypi/projects/mypy/mypy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl" - }, - { - "algorithm": "sha256", - "hash": "13cbde8f3bbe3a019313cc21ea7174ff5f6c0326ed8a303451b6781e07ce5ecb", - "url": "https://media.githubusercontent.com/media/lablup/backend.ai-oven/main/pypi/projects/mypy/mypy-1.13.0-cp312-cp312-musllinux_1_1_aarch64.whl" + "hash": "7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", + "url": "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz" } ], "project_name": "mypy", "requires_dists": [ "lxml; extra == \"reports\"", - "mypy-extensions>=1.0.0", + "mypy_extensions>=1.0.0", "orjson; extra == \"faster-cache\"", "pip; extra == \"install-types\"", "psutil>=4.0; extra == \"dmypy\"", "setuptools>=50; extra == \"mypyc\"", "tomli>=1.1.0; python_version < \"3.11\"", - "typing-extensions>=4.6.0" + "typing_extensions>=4.6.0" ], "requires_python": ">=3.8", - "version": "1.13.0" + "version": "1.14.1" }, { "artifacts": [ @@ -244,7 +239,7 @@ "pip_version": "24.1.2", "prefer_older_binary": false, "requirements": [ - "mypy==1.13.0", + "mypy==1.14.1", "pydantic~=2.9.2" ], "requires_python": [ From d3e3c2159730ef3a19263d5036220e8129988273 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 22:06:02 +0900 Subject: [PATCH 2/9] fix: Remove unncessary type stub which makes mypy to complain with zero-member enum in type stubs --- src/ai/backend/common/enum_extension.pyi | 30 ------------------------ 1 file changed, 30 deletions(-) delete mode 100644 src/ai/backend/common/enum_extension.pyi diff --git a/src/ai/backend/common/enum_extension.pyi b/src/ai/backend/common/enum_extension.pyi deleted file mode 100644 index f3a5cae0ce..0000000000 --- a/src/ai/backend/common/enum_extension.pyi +++ /dev/null @@ -1,30 +0,0 @@ -import enum - -class StringSetFlag(enum.StrEnum): - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def __or__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> set[str]: ... - def __and__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> bool: ... - def __xor__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> set[str]: ... - def __ror__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> set[str]: ... - def __rand__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> bool: ... - def __rxor__( # type: ignore[override] - self, - other: StringSetFlag | str | set[str] | frozenset[str], - ) -> set[str]: ... - def __str__(self) -> str: ... 
From 7bbe7c637573aef2d394f10b7e58e12dacb98506 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 22:19:50 +0900 Subject: [PATCH 3/9] fix: Newly found type error in agent.server --- src/ai/backend/agent/server.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ai/backend/agent/server.py b/src/ai/backend/agent/server.py index c29fd8110b..5ea85868f7 100644 --- a/src/ai/backend/agent/server.py +++ b/src/ai/backend/agent/server.py @@ -13,8 +13,7 @@ import sys from collections import OrderedDict, defaultdict from datetime import datetime, timezone -from ipaddress import _BaseAddress as BaseIPAddress -from ipaddress import ip_network +from ipaddress import IPv4Address, IPv6Address, ip_network from pathlib import Path from pprint import pformat, pprint from typing import ( @@ -1155,7 +1154,9 @@ def main( raise click.Abort() rpc_host = cfg["agent"]["rpc-listen-addr"].host - if isinstance(rpc_host, BaseIPAddress) and (rpc_host.is_unspecified or rpc_host.is_link_local): + if isinstance(rpc_host, (IPv4Address, IPv6Address)) and ( + rpc_host.is_unspecified or rpc_host.is_link_local + ): print( "ConfigurationError: " "Cannot use link-local or unspecified IP address as the RPC listening host.", From 6b9d2ebfcd9a5e93945261514d0a0cffba698274 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 22:29:47 +0900 Subject: [PATCH 4/9] fix: Newly found type error in accelerator.cuda_open.nvidia --- src/ai/backend/accelerator/cuda_open/nvidia.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/ai/backend/accelerator/cuda_open/nvidia.py b/src/ai/backend/accelerator/cuda_open/nvidia.py index 2a79685b3a..aa7193e9cb 100644 --- a/src/ai/backend/accelerator/cuda_open/nvidia.py +++ b/src/ai/backend/accelerator/cuda_open/nvidia.py @@ -1,9 +1,10 @@ import ctypes import platform from abc import ABCMeta, abstractmethod +from collections.abc import MutableMapping, Sequence from itertools import groupby from operator import itemgetter -from typing import Any, MutableMapping, NamedTuple, Tuple, TypeAlias +from typing import Any, NamedTuple, TypeAlias, cast # ref: https://developer.nvidia.com/cuda-toolkit-archive TARGET_CUDA_VERSIONS = ( @@ -487,7 +488,7 @@ def load_library(cls): return None @classmethod - def get_version(cls) -> Tuple[int, int]: + def get_version(cls) -> tuple[int, int]: if cls._version == (0, 0): raw_ver = ctypes.c_int() cls.invoke("cudaRuntimeGetVersion", ctypes.byref(raw_ver)) @@ -513,7 +514,9 @@ def get_device_props(cls, device_idx: int): props_struct = cudaDeviceProp() cls.invoke("cudaGetDeviceProperties", ctypes.byref(props_struct), device_idx) props: MutableMapping[str, Any] = { - k: getattr(props_struct, k) for k, _ in props_struct._fields_ + # Treat each field as two-tuple assuming that we don't have bit-fields + k: getattr(props_struct, k) + for k, _ in cast(Sequence[tuple[str, Any]], props_struct._fields_) } pci_bus_id = b" " * 16 cls.invoke("cudaDeviceGetPCIBusId", ctypes.c_char_p(pci_bus_id), 16, device_idx) From 6b8ff6e7c2cc51f9e94a90d2e69833a820507795 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 22:33:55 +0900 Subject: [PATCH 5/9] chore: Upgrade ruff to 0.8.5 --- tools/ruff-requirements.txt | 4 +-- tools/ruff.lock | 70 ++++++++++++++++++------------------- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/tools/ruff-requirements.txt b/tools/ruff-requirements.txt index cc4fdc7584..02e0ac463d 100644 --- a/tools/ruff-requirements.txt +++ b/tools/ruff-requirements.txt @@ -1,2 +1,2 @@ 
-ruff~=0.6.4 -ruff-lsp~=0.0.56 +ruff~=0.8.5 +ruff-lsp~=0.0.59 diff --git a/tools/ruff.lock b/tools/ruff.lock index dffae63262..00f2537552 100644 --- a/tools/ruff.lock +++ b/tools/ruff.lock @@ -9,8 +9,8 @@ // "CPython==3.12.8" // ], // "generated_with_requirements": [ -// "ruff-lsp~=0.0.56", -// "ruff~=0.6.4" +// "ruff-lsp~=0.0.59", +// "ruff~=0.8.5" // ], // "manylinux": "manylinux2014", // "requirement_constraints": [], @@ -183,84 +183,84 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039", - "url": "https://files.pythonhosted.org/packages/98/b6/be0a1ddcbac65a30c985cf7224c4fce786ba2c51e7efeb5178fe410ed3cf/ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl" + "hash": "03a90200c5dfff49e4c967b405f27fdfa81594cbb7c5ff5609e42d7fe9680da5", + "url": "https://files.pythonhosted.org/packages/0d/d6/78a9af8209ad99541816d74f01ce678fc01ebb3f37dd7ab8966646dcd92b/ruff-0.8.5-py3-none-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625", - "url": "https://files.pythonhosted.org/packages/00/52/dc311775e7b5f5b19831563cb1572ecce63e62681bccc609867711fae317/ruff-0.6.9-py3-none-musllinux_1_2_i686.whl" + "hash": "622b82bf3429ff0e346835ec213aec0a04d9730480cbffbb6ad9372014e31bbd", + "url": "https://files.pythonhosted.org/packages/00/39/4f83e517ec173e16a47c6d102cd22a1aaebe80e1208a1f2e83ab9a0e4134/ruff-0.8.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl" }, { "algorithm": "sha256", - "hash": "417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5", - "url": "https://files.pythonhosted.org/packages/13/34/a40ff8ae62fb1b26fb8e6fa7e64bc0e0a834b47317880de22edd6bfb54fb/ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl" + "hash": "762f113232acd5b768d6b875d16aad6b00082add40ec91c927f0673a8ec4ede8", + "url": "https://files.pythonhosted.org/packages/17/47/8f9514b670969aab57c5fc826fb500a16aee8feac1bcf8a91358f153a5ba/ruff-0.8.5-py3-none-musllinux_1_2_i686.whl" }, { "algorithm": "sha256", - "hash": "eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577", - "url": "https://files.pythonhosted.org/packages/13/d7/def9e5f446d75b9a9c19b24231a3a658c075d79163b08582e56fa5dcfa38/ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl" + "hash": "f99be814d77a5dac8a8957104bdd8c359e85c86b0ee0e38dca447cb1095f70fb", + "url": "https://files.pythonhosted.org/packages/1a/f6/52a2973ff108d74b5da706a573379eea160bece098f7cfa3f35dc4622710/ruff-0.8.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2", - "url": "https://files.pythonhosted.org/packages/26/0d/6148a48dab5662ca1d5a93b7c0d13c03abd3cc7e2f35db08410e47cef15d/ruff-0.6.9.tar.gz" + "hash": "587c5e95007612c26509f30acc506c874dab4c4abbacd0357400bd1aa799931b", + "url": "https://files.pythonhosted.org/packages/1b/fe/644b70d473a27b5112ac7a3428edcc1ce0db775c301ff11aa146f71886e0/ruff-0.8.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e", - "url": "https://files.pythonhosted.org/packages/29/61/b376d775deb5851cb48d893c568b511a6d3625ef2c129ad5698b64fb523c/ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "1098d36f69831f7ff2a1da3e6407d5fbd6dfa2559e4f74ff2d260c5588900317", + "url": 
"https://files.pythonhosted.org/packages/25/5d/4b5403f3e89837decfd54c51bea7f94b7d3fae77e08858603d0e04d7ad17/ruff-0.8.5.tar.gz" }, { "algorithm": "sha256", - "hash": "3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7", - "url": "https://files.pythonhosted.org/packages/2e/6d/25a4386ae4009fc798bd10ba48c942d1b0b3e459b5403028f1214b6dd161/ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" + "hash": "f69ab37771ea7e0715fead8624ec42996d101269a96e31f4d31be6fc33aa19b7", + "url": "https://files.pythonhosted.org/packages/55/74/83bb74a44183b904216f3edfb9995b89830c83aaa6ce84627f74da0e0cf8/ruff-0.8.5-py3-none-macosx_10_12_x86_64.whl" }, { "algorithm": "sha256", - "hash": "3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb", - "url": "https://files.pythonhosted.org/packages/45/87/801a52d26c8dbf73424238e9908b9ceac430d903c8ef35eab1b44fcfa2bd/ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl" + "hash": "5ad11a5e3868a73ca1fa4727fe7e33735ea78b416313f4368c504dbeb69c0f88", + "url": "https://files.pythonhosted.org/packages/73/f8/03391745a703ce11678eb37c48ae89ec60396ea821e9d0bcea7c8e88fd91/ruff-0.8.5-py3-none-linux_armv6l.whl" }, { "algorithm": "sha256", - "hash": "7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829", - "url": "https://files.pythonhosted.org/packages/6c/d6/7f34160818bcb6e84ce293a5966cba368d9112ff0289b273fbb689046047/ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "7512e8cb038db7f5db6aae0e24735ff9ea03bb0ed6ae2ce534e9baa23c1dc9ea", + "url": "https://files.pythonhosted.org/packages/a5/a8/2a3ea6bacead963f7aeeba0c61815d9b27b0d638e6a74984aa5cc5d27733/ruff-0.8.5-py3-none-musllinux_1_2_armv7l.whl" }, { "algorithm": "sha256", - "hash": "064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd", - "url": "https://files.pythonhosted.org/packages/6e/8f/f7a0a0ef1818662efb32ed6df16078c95da7a0a3248d64c2410c1e27799f/ruff-0.6.9-py3-none-linux_armv6l.whl" + "hash": "9d99cf80b0429cbebf31cbbf6f24f05a29706f0437c40413d950e67e2d4faca4", + "url": "https://files.pythonhosted.org/packages/b6/67/db2df2dd4a34b602d7f6ebb1b3744c8157f0d3579973ffc58309c9c272e8/ruff-0.8.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl" }, { "algorithm": "sha256", - "hash": "140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec", - "url": "https://files.pythonhosted.org/packages/8b/69/b179a5faf936a9e2ab45bb412a668e4661eded964ccfa19d533f29463ef6/ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl" + "hash": "c01c048f9c3385e0fd7822ad0fd519afb282af9cf1778f3580e540629df89725", + "url": "https://files.pythonhosted.org/packages/ce/1f/3b30f3c65b1303cb8e268ec3b046b77ab21ed8e26921cfc7e8232aa57f2c/ruff-0.8.5-py3-none-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa", - "url": "https://files.pythonhosted.org/packages/a7/86/96f4252f41840e325b3fa6c48297e661abb9f564bd7dcc0572398c8daa42/ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "c9d526a62c9eda211b38463528768fd0ada25dad524cb33c0e99fcff1c67b5dc", + "url": "https://files.pythonhosted.org/packages/e3/95/c1d1a1fe36658c1f3e1b47e1cd5f688b72d5786695b9e621c2c38399a95e/ruff-0.8.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl" }, { "algorithm": "sha256", - "hash": "12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0", - "url": "https://files.pythonhosted.org/packages/be/27/6f7161d90320a389695e32b6ebdbfbedde28ccbf52451e4b723d7ce744ad/ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl" + 
"hash": "d56de7220a35607f9fe59f8a6d018e14504f7b71d784d980835e20fc0611cd50", + "url": "https://files.pythonhosted.org/packages/e7/9f/5ee5dcd135411402e35b6ec6a8dfdadbd31c5cd1c36a624d356a38d76090/ruff-0.8.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": "53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c", - "url": "https://files.pythonhosted.org/packages/c7/ef/fd1b4be979c579d191eeac37b5cfc0ec906de72c8bcd8595e2c81bb700c1/ruff-0.6.9-py3-none-macosx_11_0_arm64.whl" + "hash": "b5462d7804558ccff9c08fe8cbf6c14b7efe67404316696a2dde48297b1925bb", + "url": "https://files.pythonhosted.org/packages/e8/7a/a162a4feb3ef85d594527165e366dde09d7a1e534186ff4ba5d127eda850/ruff-0.8.5-py3-none-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f", - "url": "https://files.pythonhosted.org/packages/f7/f6/bdf891a9200d692c94ebcd06ae5a2fa5894e522f2c66c2a12dd5d8cb2654/ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "7b75ac29715ac60d554a049dbb0ef3b55259076181c3369d79466cb130eb5afd", + "url": "https://files.pythonhosted.org/packages/fe/ff/fe3a6a73006bced73e60d171d154a82430f61d97e787f511a24bd6302611/ruff-0.8.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl" } ], "project_name": "ruff", "requires_dists": [], "requires_python": ">=3.7", - "version": "0.6.9" + "version": "0.8.5" }, { "artifacts": [ @@ -321,8 +321,8 @@ "pip_version": "24.1.2", "prefer_older_binary": false, "requirements": [ - "ruff-lsp~=0.0.56", - "ruff~=0.6.4" + "ruff-lsp~=0.0.59", + "ruff~=0.8.5" ], "requires_python": [ "==3.12.8" From fc28f22d3c445412f2242f8b640adef732c8f3a2 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Thu, 2 Jan 2025 22:44:16 +0900 Subject: [PATCH 6/9] style: Apply the new Ruff preview style - astral-sh#11056 is now resolved by astral-sh#13860. 
--- .../backend/accelerator/cuda_open/plugin.py | 2 +- src/ai/backend/accelerator/mock/plugin.py | 4 +- src/ai/backend/account_manager/server.py | 6 +- src/ai/backend/agent/agent.py | 10 +-- src/ai/backend/agent/docker/agent.py | 6 +- src/ai/backend/agent/docker/utils.py | 2 +- src/ai/backend/agent/kubernetes/agent.py | 2 +- src/ai/backend/agent/kubernetes/utils.py | 2 +- src/ai/backend/agent/server.py | 6 +- src/ai/backend/agent/watcher.py | 2 +- src/ai/backend/cli/interaction.py | 6 +- src/ai/backend/client/cli/admin/image.py | 2 +- src/ai/backend/client/cli/pretty.py | 4 +- src/ai/backend/client/cli/service.py | 4 +- .../backend/client/cli/session/lifecycle.py | 6 +- src/ai/backend/client/cli/vfolder.py | 2 +- src/ai/backend/client/func/image.py | 28 ++----- src/ai/backend/client/func/network.py | 10 +-- src/ai/backend/client/func/vfolder.py | 2 +- src/ai/backend/client/output/formatters.py | 12 +-- src/ai/backend/client/request.py | 18 ++--- src/ai/backend/common/docker.py | 2 +- src/ai/backend/common/events_experimental.py | 2 +- src/ai/backend/common/logging.py | 3 +- src/ai/backend/common/redis_helper.py | 6 +- src/ai/backend/common/typed_validators.py | 12 +-- src/ai/backend/common/types.py | 2 +- src/ai/backend/install/context.py | 4 +- src/ai/backend/kernel/base.py | 6 +- src/ai/backend/kernel/intrinsic.py | 6 +- src/ai/backend/kernel/service.py | 2 +- src/ai/backend/manager/api/auth.py | 10 +-- src/ai/backend/manager/api/manager.py | 2 +- src/ai/backend/manager/api/resource.py | 4 +- src/ai/backend/manager/api/session.py | 14 ++-- src/ai/backend/manager/api/stream.py | 6 +- src/ai/backend/manager/api/utils.py | 4 +- src/ai/backend/manager/api/vfolder.py | 16 ++-- src/ai/backend/manager/cli/__main__.py | 8 +- src/ai/backend/manager/cli/image_impl.py | 8 +- .../manager/container_registry/aws_ecr.py | 2 +- .../manager/container_registry/base.py | 8 +- .../manager/container_registry/docker.py | 2 +- .../manager/container_registry/github.py | 2 +- .../b6b884fbae1f_add_session_table.py | 2 +- src/ai/backend/manager/models/dotfile.py | 2 +- src/ai/backend/manager/models/endpoint.py | 4 +- src/ai/backend/manager/models/gql.py | 8 +- .../manager/models/gql_models/group.py | 2 +- .../manager/models/gql_models/image.py | 8 +- .../manager/models/gql_models/session.py | 2 +- src/ai/backend/manager/models/group.py | 2 +- src/ai/backend/manager/models/kernel.py | 8 +- src/ai/backend/manager/models/storage.py | 2 +- src/ai/backend/manager/models/vfolder.py | 6 +- src/ai/backend/manager/openapi.py | 2 +- src/ai/backend/manager/registry.py | 8 +- .../backend/manager/scheduler/dispatcher.py | 2 +- src/ai/backend/manager/server.py | 2 +- src/ai/backend/storage/api/client.py | 4 +- src/ai/backend/storage/config.py | 2 +- src/ai/backend/storage/migration.py | 4 +- src/ai/backend/storage/netapp/netappclient.py | 6 +- .../backend/storage/vast/vastdata_client.py | 2 +- src/ai/backend/storage/weka/__init__.py | 2 +- .../test/cli_integration/admin/test_domain.py | 36 ++++----- .../cli_integration/admin/test_keypair.py | 42 +++++----- .../admin/test_keypair_resource_policy.py | 76 +++++++++---------- .../admin/test_scaling_group.py | 12 +-- .../test/cli_integration/admin/test_user.py | 30 ++++---- .../test/cli_integration/user/test_vfolder.py | 36 ++++----- src/ai/backend/web/server.py | 6 +- src/ai/backend/wsproxy/proxy/backend/http.py | 2 +- tests/common/redis_helper/docker.py | 2 +- tests/common/test_docker.py | 4 +- tests/manager/conftest.py | 4 +- tests/manager/test_agent_selector.py | 4 +- 77 files 
changed, 288 insertions(+), 313 deletions(-) diff --git a/src/ai/backend/accelerator/cuda_open/plugin.py b/src/ai/backend/accelerator/cuda_open/plugin.py index 6e7dee968a..8861bd76a9 100644 --- a/src/ai/backend/accelerator/cuda_open/plugin.py +++ b/src/ai/backend/accelerator/cuda_open/plugin.py @@ -169,7 +169,7 @@ async def list_devices(self) -> Collection[CUDADevice]: if dev_id in self.device_mask: continue raw_info = libcudart.get_device_props(int(dev_id)) - sysfs_node_path = f"/sys/bus/pci/devices/{raw_info["pciBusID_str"].lower()}/numa_node" + sysfs_node_path = f"/sys/bus/pci/devices/{raw_info['pciBusID_str'].lower()}/numa_node" node: Optional[int] try: node = int(Path(sysfs_node_path).read_text().strip()) diff --git a/src/ai/backend/accelerator/mock/plugin.py b/src/ai/backend/accelerator/mock/plugin.py index f7984be308..4e1fa3e109 100644 --- a/src/ai/backend/accelerator/mock/plugin.py +++ b/src/ai/backend/accelerator/mock/plugin.py @@ -297,7 +297,7 @@ async def list_devices(self) -> Collection[MockDevice]: init_kwargs["is_mig_device"] = dev_info["is_mig_device"] if dev_info["is_mig_device"]: init_kwargs["device_id"] = DeviceId( - f"MIG-{dev_info["mother_uuid"]}/{idx}/0" + f"MIG-{dev_info['mother_uuid']}/{idx}/0" ) device_cls = CUDADevice case _: @@ -810,7 +810,7 @@ def get_metadata(self) -> AcceleratorMetadata: device_format = self.device_formats[format_key] return { - "slot_name": f"{self.mock_config["slot_name"]}.{format_key}", + "slot_name": f"{self.mock_config['slot_name']}.{format_key}", "human_readable_name": device_format["human_readable_name"], "description": device_format["description"], "display_unit": device_format["display_unit"], diff --git a/src/ai/backend/account_manager/server.py b/src/ai/backend/account_manager/server.py index 12eab464ab..6105a02fee 100644 --- a/src/ai/backend/account_manager/server.py +++ b/src/ai/backend/account_manager/server.py @@ -299,9 +299,9 @@ async def server_main( try: ssl_ctx = None if am_cfg.ssl_enabled: - assert ( - am_cfg.ssl_cert is not None - ), "Should set `account_manager.ssl-cert` in config file." + assert am_cfg.ssl_cert is not None, ( + "Should set `account_manager.ssl-cert` in config file." 
+ ) ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain( str(am_cfg.ssl_cert), diff --git a/src/ai/backend/agent/agent.py b/src/ai/backend/agent/agent.py index 907a31e527..67ef3e4b38 100644 --- a/src/ai/backend/agent/agent.py +++ b/src/ai/backend/agent/agent.py @@ -2035,7 +2035,7 @@ async def create_kernel( if len(overlapping_services) > 0: raise AgentError( f"Port {port_no} overlaps with built-in service" - f" {overlapping_services[0]["name"]}" + f" {overlapping_services[0]['name']}" ) preopen_sport: ServicePort = { @@ -2377,7 +2377,7 @@ async def load_model_definition( if not model_definition_path: raise AgentError( - f"Model definition file ({" or ".join(model_definition_candidates)}) does not exist under vFolder" + f"Model definition file ({' or '.join(model_definition_candidates)}) does not exist under vFolder" f" {model_folder.name} (ID {model_folder.vfid})", ) try: @@ -2408,11 +2408,11 @@ async def load_model_definition( ] if len(overlapping_services) > 0: raise AgentError( - f"Port {service["port"]} overlaps with built-in service" - f" {overlapping_services[0]["name"]}" + f"Port {service['port']} overlaps with built-in service" + f" {overlapping_services[0]['name']}" ) service_ports.append({ - "name": f"{model["name"]}-{service["port"]}", + "name": f"{model['name']}-{service['port']}", "protocol": ServicePortProtocols.PREOPEN, "container_ports": (service["port"],), "host_ports": (None,), diff --git a/src/ai/backend/agent/docker/agent.py b/src/ai/backend/agent/docker/agent.py index 3153d88b9f..74c771813a 100644 --- a/src/ai/backend/agent/docker/agent.py +++ b/src/ai/backend/agent/docker/agent.py @@ -933,7 +933,7 @@ async def start_container( label for label in service_ports_label if label ]) update_nested_dict(container_config, self.computer_docker_args) - kernel_name = f"kernel.{self.image_ref.name.split("/")[-1]}.{self.kernel_id}" + kernel_name = f"kernel.{self.image_ref.name.split('/')[-1]}.{self.kernel_id}" # optional local override of docker config extra_container_opts_name = "agent-docker-container-opts.json" @@ -1202,7 +1202,7 @@ async def __ainit__(self) -> None: { "Cmd": [ f"UNIX-LISTEN:/ipc/{self.agent_sockpath.name},unlink-early,fork,mode=777", - f"TCP-CONNECT:127.0.0.1:{self.local_config["agent"]["agent-sock-port"]}", + f"TCP-CONNECT:127.0.0.1:{self.local_config['agent']['agent-sock-port']}", ], "HostConfig": { "Mounts": [ @@ -1449,7 +1449,7 @@ async def handle_agent_socket(self): while True: agent_sock = zmq_ctx.socket(zmq.REP) try: - agent_sock.bind(f"tcp://127.0.0.1:{self.local_config["agent"]["agent-sock-port"]}") + agent_sock.bind(f"tcp://127.0.0.1:{self.local_config['agent']['agent-sock-port']}") while True: msg = await agent_sock.recv_multipart() if not msg: diff --git a/src/ai/backend/agent/docker/utils.py b/src/ai/backend/agent/docker/utils.py index 76ff9d787f..966b8d9b04 100644 --- a/src/ai/backend/agent/docker/utils.py +++ b/src/ai/backend/agent/docker/utils.py @@ -64,7 +64,7 @@ async def get_container_version_and_status(self) -> Tuple[int, bool]: raise if c["Config"].get("Labels", {}).get("ai.backend.system", "0") != "1": raise RuntimeError( - f"An existing container named \"{c["Name"].lstrip("/")}\" is not a system container" + f'An existing container named "{c["Name"].lstrip("/")}" is not a system container' " spawned by Backend.AI. Please check and remove it." 
) return ( diff --git a/src/ai/backend/agent/kubernetes/agent.py b/src/ai/backend/agent/kubernetes/agent.py index 8db627eb48..ac74f5ea8f 100644 --- a/src/ai/backend/agent/kubernetes/agent.py +++ b/src/ai/backend/agent/kubernetes/agent.py @@ -887,7 +887,7 @@ async def check_krunner_pv_status(self): new_pv.label("backend.ai/backend-ai-scratch-volume", "hostPath") else: raise NotImplementedError( - f'Scratch type {self.local_config["container"]["scratch-type"]} is not' + f"Scratch type {self.local_config['container']['scratch-type']} is not" " supported", ) diff --git a/src/ai/backend/agent/kubernetes/utils.py b/src/ai/backend/agent/kubernetes/utils.py index f8bea4abd6..fc0f5836ae 100644 --- a/src/ai/backend/agent/kubernetes/utils.py +++ b/src/ai/backend/agent/kubernetes/utils.py @@ -63,7 +63,7 @@ async def get_container_version_and_status(self) -> Tuple[int, bool]: raise if c["Config"].get("Labels", {}).get("ai.backend.system", "0") != "1": raise RuntimeError( - f"An existing container named \"{c["Name"].lstrip("/")}\" is not a system container" + f'An existing container named "{c["Name"].lstrip("/")}" is not a system container' " spawned by Backend.AI. Please check and remove it." ) return ( diff --git a/src/ai/backend/agent/server.py b/src/ai/backend/agent/server.py index 5ea85868f7..f473561250 100644 --- a/src/ai/backend/agent/server.py +++ b/src/ai/backend/agent/server.py @@ -971,7 +971,7 @@ async def server_main( log.info("Preparing kernel runner environments...") kernel_mod = importlib.import_module( - f"ai.backend.agent.{local_config["agent"]["backend"].value}.kernel", + f"ai.backend.agent.{local_config['agent']['backend'].value}.kernel", ) krunner_volumes = await kernel_mod.prepare_krunner_env(local_config) # type: ignore # TODO: merge k8s branch: nfs_mount_path = local_config['baistatic']['mounted-at'] @@ -991,8 +991,8 @@ async def server_main( } scope_prefix_map = { ConfigScopes.GLOBAL: "", - ConfigScopes.SGROUP: f"sgroup/{local_config["agent"]["scaling-group"]}", - ConfigScopes.NODE: f"nodes/agents/{local_config["agent"]["id"]}", + ConfigScopes.SGROUP: f"sgroup/{local_config['agent']['scaling-group']}", + ConfigScopes.NODE: f"nodes/agents/{local_config['agent']['id']}", } etcd = AsyncEtcd( local_config["etcd"]["addr"], diff --git a/src/ai/backend/agent/watcher.py b/src/ai/backend/agent/watcher.py index 59f9558482..ac1862e4fa 100644 --- a/src/ai/backend/agent/watcher.py +++ b/src/ai/backend/agent/watcher.py @@ -409,7 +409,7 @@ def main( fn = Path(cfg["logging"]["file"]["filename"]) cfg["logging"]["file"]["filename"] = f"{fn.stem}-watcher{fn.suffix}" - setproctitle(f"backend.ai: watcher {cfg["etcd"]["namespace"]}") + setproctitle(f"backend.ai: watcher {cfg['etcd']['namespace']}") with logger: log.info("Backend.AI Agent Watcher {0}", VERSION) log.info("runtime: {0}", utils.env_info()) diff --git a/src/ai/backend/cli/interaction.py b/src/ai/backend/cli/interaction.py index adcbe18ab2..bfe79ba2f9 100644 --- a/src/ai/backend/cli/interaction.py +++ b/src/ai/backend/cli/interaction.py @@ -73,12 +73,12 @@ def ask_string_in_array(prompt: str, choices: list, default: str) -> Optional[st if default: question = ( - f"{prompt} (choices: {"/".join(choices)}, " + f"{prompt} (choices: {'/'.join(choices)}, " f"if left empty, this will use default value: {default}): " ) else: question = ( - f"{prompt} (choices: {"/".join(choices)}, if left empty, this will remove this key): " + f"{prompt} (choices: {'/'.join(choices)}, if left empty, this will remove this key): " ) while True: @@ -92,7 +92,7 @@ def 
ask_string_in_array(prompt: str, choices: list, default: str) -> Optional[st elif user_reply.lower() in choices: break else: - print(f"Please answer in {"/".join(choices)}.") + print(f"Please answer in {'/'.join(choices)}.") return user_reply diff --git a/src/ai/backend/client/cli/admin/image.py b/src/ai/backend/client/cli/admin/image.py index e3d0da3c92..11101a90b9 100644 --- a/src/ai/backend/client/cli/admin/image.py +++ b/src/ai/backend/client/cli/admin/image.py @@ -61,7 +61,7 @@ async def rescan_images_impl(registry: str) -> None: print_error(e) sys.exit(ExitCode.FAILURE) if not result["ok"]: - print_fail(f"Failed to begin registry scanning: {result["msg"]}") + print_fail(f"Failed to begin registry scanning: {result['msg']}") sys.exit(ExitCode.FAILURE) print_done("Started updating the image metadata from the configured registries.") bgtask_id = result["task_id"] diff --git a/src/ai/backend/client/cli/pretty.py b/src/ai/backend/client/cli/pretty.py index f98382b291..65e25b444c 100644 --- a/src/ai/backend/client/cli/pretty.py +++ b/src/ai/backend/client/cli/pretty.py @@ -127,7 +127,7 @@ def format_error(exc: Exception): if matches: yield "\nCandidates (up to 10 recent entries):\n" for item in matches: - yield f"- {item["id"]} ({item["name"]}, {item["status"]})\n" + yield f"- {item['id']} ({item['name']}, {item['status']})\n" elif exc.data["type"].endswith("/session-already-exists"): existing_session_id = exc.data["data"].get("existingSessionId", None) if existing_session_id is not None: @@ -144,7 +144,7 @@ def format_error(exc: Exception): if exc.data["type"].endswith("/graphql-error"): yield "\n\u279c Message:\n" for err_item in exc.data.get("data", []): - yield f"{err_item["message"]}" + yield f"{err_item['message']}" if err_path := err_item.get("path"): yield f" (path: {_format_gql_path(err_path)})" yield "\n" diff --git a/src/ai/backend/client/cli/service.py b/src/ai/backend/client/cli/service.py index 22bc8f8a67..fac3c76d40 100644 --- a/src/ai/backend/client/cli/service.py +++ b/src/ai/backend/client/cli/service.py @@ -113,7 +113,7 @@ def info(ctx: CLIContext, service_name_or_id: str): ) print() for route in routes: - print(f"Route {route["routing_id"]}: ") + print(f"Route {route['routing_id']}: ") ctx.output.print_item( route, _default_routing_fields, @@ -645,7 +645,7 @@ def generate_token(ctx: CLIContext, service_name_or_id: str, duration: str, quie if quiet: print(resp["token"]) else: - print_done(f"Generated API token {resp["token"]}") + print_done(f"Generated API token {resp['token']}") except Exception as e: ctx.output.print_error(e) sys.exit(ExitCode.FAILURE) diff --git a/src/ai/backend/client/cli/session/lifecycle.py b/src/ai/backend/client/cli/session/lifecycle.py index 927de073de..a70d29ca8c 100644 --- a/src/ai/backend/client/cli/session/lifecycle.py +++ b/src/ai/backend/client/cli/session/lifecycle.py @@ -977,7 +977,7 @@ async def cmd_main() -> None: session = api_sess.ComputeSession.from_session_id(session_id) resp = await session.update(priority=priority) item = resp["item"] - print_done(f"Session {item["name"]!r} priority is changed to {item["priority"]}.") + print_done(f"Session {item['name']!r} priority is changed to {item['priority']}.") try: asyncio.run(cmd_main()) @@ -1372,7 +1372,7 @@ def watch( session_names = _fetch_session_names() if not session_names: if output == "json": - sys.stderr.write(f'{json.dumps({"ok": False, "reason": "No matching items."})}\n') + sys.stderr.write(f"{json.dumps({'ok': False, 'reason': 'No matching items.'})}\n") else: print_fail("No 
matching items.") sys.exit(ExitCode.FAILURE) @@ -1394,7 +1394,7 @@ def watch( else: if output == "json": sys.stderr.write( - f'{json.dumps({"ok": False, "reason": "No matching items."})}\n' + f"{json.dumps({'ok': False, 'reason': 'No matching items.'})}\n" ) else: print_fail("No matching items.") diff --git a/src/ai/backend/client/cli/vfolder.py b/src/ai/backend/client/cli/vfolder.py index bf0e65d22f..584c900843 100644 --- a/src/ai/backend/client/cli/vfolder.py +++ b/src/ai/backend/client/cli/vfolder.py @@ -426,7 +426,7 @@ def request_download(name, filename): with Session() as session: try: response = json.loads(session.VFolder(name).request_download(filename)) - print_done(f'Download token: {response["token"]}') + print_done(f"Download token: {response['token']}") except Exception as e: print_error(e) sys.exit(ExitCode.FAILURE) diff --git a/src/ai/backend/client/func/image.py b/src/ai/backend/client/func/image.py index 08c9f0793d..6fe2757b13 100644 --- a/src/ai/backend/client/func/image.py +++ b/src/ai/backend/client/func/image.py @@ -37,13 +37,7 @@ async def list( """ Fetches the list of registered images in this cluster. """ - q = ( - "query($is_operation: Boolean) {" - " images(is_operation: $is_operation) {" - " $fields" - " }" - "}" - ) + q = "query($is_operation: Boolean) { images(is_operation: $is_operation) { $fields }}" q = q.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "is_operation": operation, @@ -87,7 +81,7 @@ async def get_by_id( """ Fetches the information about registered image in this cluster. """ - q = "query($id: String!) {" " image(id: $id) {" " $fields" " }" "}" + q = "query($id: String!) { image(id: $id) { $fields }}" q = q.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "id": id, @@ -104,7 +98,7 @@ async def list_customized( """ Fetches the list of customized images in this cluster. """ - q = "query {" " customized_images {" " $fields" " }" "}" + q = "query { customized_images { $fields }}" q = q.replace("$fields", " ".join(f.field_ref for f in fields)) data = await api_session.get().Admin._query(q, {}) return data["customized_images"] @@ -128,13 +122,7 @@ async def rescan_images(cls, registry: str): @api_function @classmethod async def forget_image_by_id(cls, image_id: str): - q = ( - "mutation($image_id: String!) {" - " forget_image_by_id(image_id: $image_id) {" - " ok msg" - " }" - "}" - ) + q = "mutation($image_id: String!) { forget_image_by_id(image_id: $image_id) { ok msg }}" variables = { "image_id": image_id, } @@ -144,13 +132,7 @@ async def forget_image_by_id(cls, image_id: str): @api_function @classmethod async def untag_image_from_registry(cls, id: str): - q = ( - "mutation($id: String!) {" - " untag_image_from_registry(id: $id) {" - " ok msg" - " }" - "}" - ) + q = "mutation($id: String!) { untag_image_from_registry(id: $id) { ok msg }}" variables = { "id": id, } diff --git a/src/ai/backend/client/func/network.py b/src/ai/backend/client/func/network.py index b28b9f04a1..edc1adc039 100644 --- a/src/ai/backend/client/func/network.py +++ b/src/ai/backend/client/func/network.py @@ -93,7 +93,7 @@ async def get( """ Fetches the information of the network. """ - q = "query($id: String!) {" " network(id: $id) {" " $fields" " }" "}" + q = "query($id: String!) 
{ network(id: $id) { $fields }}" q = q.replace("$fields", " ".join(f.field_ref for f in (fields or _default_list_fields))) data = await api_session.get().Admin._query(q, {"id": str(self.network_id)}) return data["images"] @@ -122,13 +122,7 @@ async def delete(self) -> None: """ Deletes network. Delete only works for networks that are not attached to active session. """ - q = ( - "mutation($network: String!) {" - " delete_network(network: $network) {" - " ok msg" - " }" - "}" - ) + q = "mutation($network: String!) { delete_network(network: $network) { ok msg }}" variables = { "network": str(self.network_id), } diff --git a/src/ai/backend/client/func/vfolder.py b/src/ai/backend/client/func/vfolder.py index 6e3bd96f64..7e0e5d9470 100644 --- a/src/ai/backend/client/func/vfolder.py +++ b/src/ai/backend/client/func/vfolder.py @@ -495,7 +495,7 @@ async def _upload_files( input_file = open(base_path / file_path, "rb") else: input_file = open(str(Path(file_path).relative_to(base_path)), "rb") - print(f"Uploading {base_path / file_path} via {upload_info["url"]} ...") + print(f"Uploading {base_path / file_path} via {upload_info['url']} ...") # TODO: refactor out the progress bar uploader = tus_client.async_uploader( file_stream=input_file, diff --git a/src/ai/backend/client/output/formatters.py b/src/ai/backend/client/output/formatters.py index 063f46843c..2ec224f5b6 100644 --- a/src/ai/backend/client/output/formatters.py +++ b/src/ai/backend/client/output/formatters.py @@ -17,7 +17,7 @@ def format_stats(raw_stats: Optional[str], indent="") -> str: if raw_stats is None: return "(unavailable)" stats = json.loads(raw_stats) - text = "\n".join(f"- {k + ": ":18s}{v}" for k, v in stats.items()) + text = "\n".join(f"- {k + ': ':18s}{v}" for k, v in stats.items()) return "\n" + textwrap.indent(text, indent) @@ -239,7 +239,7 @@ def format_value(metric: MetricValue, binary: bool) -> str: if metric["pct"] is None: node_metric_bufs.append(f"{stat_key}: (calculating...) 
% ({num_cores} cores)") else: - node_metric_bufs.append(f"{stat_key}: {metric["pct"]} % ({num_cores} cores)") + node_metric_bufs.append(f"{stat_key}: {metric['pct']} % ({num_cores} cores)") else: binary = stat_key == "mem" node_metric_bufs.append(f"{stat_key}: {format_value(metric, binary)}") @@ -322,10 +322,10 @@ def format_console(self, value: Any, field: FieldSpec, indent="") -> str: else: text = "" for item in value: - text += f"+ {item["id"]}\n" + text += f"+ {item['id']}\n" text += "\n".join( f" - {f.humanized_name}: " - f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), " ")}" # noqa + f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), ' ')}" # noqa for f in field.subfields.values() if f.field_name != "id" ) @@ -340,10 +340,10 @@ def format_console(self, value: Any, field: FieldSpec, indent="") -> str: else: text = "" for item in value: - text += f"+ {item["name"]} ({item["id"]})\n" + text += f"+ {item['name']} ({item['id']})\n" text += "\n".join( f" - {f.humanized_name}: " - f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), " ")}" # noqa + f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), ' ')}" # noqa for f in field.subfields.values() if f.field_name not in ("id", "name") ) diff --git a/src/ai/backend/client/request.py b/src/ai/backend/client/request.py index 36aa61292f..2db2e1d323 100644 --- a/src/ai/backend/client/request.py +++ b/src/ai/backend/client/request.py @@ -191,9 +191,9 @@ def set_content( """ Sets the content of the request. """ - assert ( - self._attached_files is None - ), "cannot set content because you already attached files." + assert self._attached_files is None, ( + "cannot set content because you already attached files." + ) guessed_content_type = "application/octet-stream" if value is None: guessed_content_type = "text/plain" @@ -349,9 +349,9 @@ def connect_websocket(self, **kwargs) -> WebSocketContextManager: This method only works with :class:`~ai.backend.client.session.AsyncSession`. """ - assert isinstance( - self.session, AsyncSession - ), "Cannot use websockets with sessions in the synchronous mode" + assert isinstance(self.session, AsyncSession), ( + "Cannot use websockets with sessions in the synchronous mode" + ) assert self.method == "GET", "Invalid websocket method" self.date = datetime.now(tzutc()) assert self.date is not None @@ -378,9 +378,9 @@ def connect_events(self, **kwargs) -> SSEContextManager: This method only works with :class:`~ai.backend.client.session.AsyncSession`. 
""" - assert isinstance( - self.session, AsyncSession - ), "Cannot use event streams with sessions in the synchronous mode" + assert isinstance(self.session, AsyncSession), ( + "Cannot use event streams with sessions in the synchronous mode" + ) assert self.method == "GET", "Invalid event stream method" self.date = datetime.now(tzutc()) assert self.date is not None diff --git a/src/ai/backend/common/docker.py b/src/ai/backend/common/docker.py index 3fe01e4bf8..496faba5fd 100644 --- a/src/ai/backend/common/docker.py +++ b/src/ai/backend/common/docker.py @@ -561,7 +561,7 @@ def generate_aliases(self) -> Mapping[str, "ImageRef"]: for name in possible_names: ret[name] = self for name, ptags in itertools.product(possible_names, itertools.product(*possible_ptags)): - ret[f"{name}:{"-".join(t for t in ptags if t)}"] = self + ret[f"{name}:{'-'.join(t for t in ptags if t)}"] = self return ret @staticmethod diff --git a/src/ai/backend/common/events_experimental.py b/src/ai/backend/common/events_experimental.py index ae9f4228b1..e81ef3c7ad 100644 --- a/src/ai/backend/common/events_experimental.py +++ b/src/ai/backend/common/events_experimental.py @@ -211,7 +211,7 @@ def show_retry_warning( now = time.perf_counter() if (warn_on_first_attempt and retry_log_count == 0) or now - last_log_time >= 10.0: log.warning( - "Retrying due to interruption of Redis connection " "({}, retrying-for: {:.3f}s)", + "Retrying due to interruption of Redis connection ({}, retrying-for: {:.3f}s)", repr(e), now - first_trial, ) diff --git a/src/ai/backend/common/logging.py b/src/ai/backend/common/logging.py index ed5d5a789c..8d31bfe1f4 100644 --- a/src/ai/backend/common/logging.py +++ b/src/ai/backend/common/logging.py @@ -6,7 +6,6 @@ __all__ = ("BraceStyleAdapter",) warnings.warn( - "Please import BraceStyleAdapter from ai.backend.logging " - "instead of ai.backend.common.logging", + "Please import BraceStyleAdapter from ai.backend.logging instead of ai.backend.common.logging", DeprecationWarning, ) diff --git a/src/ai/backend/common/redis_helper.py b/src/ai/backend/common/redis_helper.py index 3696959724..c431fbafbc 100644 --- a/src/ai/backend/common/redis_helper.py +++ b/src/ai/backend/common/redis_helper.py @@ -495,9 +495,9 @@ def get_redis_object( service_name = redis_config.get("service_name") password = redis_config.get("password") - assert ( - service_name is not None - ), "config/redis/service_name is required when using Redis Sentinel" + assert service_name is not None, ( + "config/redis/service_name is required when using Redis Sentinel" + ) sentinel = Sentinel( [(str(host), port) for host, port in sentinel_addresses], diff --git a/src/ai/backend/common/typed_validators.py b/src/ai/backend/common/typed_validators.py index be0a3e05d1..44855a5e5f 100644 --- a/src/ai/backend/common/typed_validators.py +++ b/src/ai/backend/common/typed_validators.py @@ -70,12 +70,12 @@ def time_duration_serializer(cls, value: TVariousDelta) -> float | str: case relativedelta(): # just like the deserializer, serializing relativedelta is only supported when year or month (not both) is supplied # years or months being normalized is not considered as a valid case since relativedelta does not allow fraction of years or months as an input - assert not ( - value.years and value.months - ), "Serializing relativedelta with both years and months contained is not supported" - assert ( - value.years or value.months - ), "Serialization is supported only for months or years field" + assert not (value.years and value.months), ( + "Serializing 
relativedelta with both years and months contained is not supported" + ) + assert value.years or value.months, ( + "Serialization is supported only for months or years field" + ) if value.years: return f"{value.years}yr" elif value.months: diff --git a/src/ai/backend/common/types.py b/src/ai/backend/common/types.py index 71e388f24a..b01fe58cf0 100644 --- a/src/ai/backend/common/types.py +++ b/src/ai/backend/common/types.py @@ -703,7 +703,7 @@ def normalize_slots(self, *, ignore_unknown: bool) -> ResourceSlot: known_slots = current_resource_slots.get() unset_slots = known_slots.keys() - self.data.keys() if not ignore_unknown and (unknown_slots := self.data.keys() - known_slots.keys()): - raise ValueError(f"Unknown slots: {", ".join(map(repr, unknown_slots))}") + raise ValueError(f"Unknown slots: {', '.join(map(repr, unknown_slots))}") data = {k: v for k, v in self.data.items() if k in known_slots} for k in unset_slots: data[k] = Decimal(0) diff --git a/src/ai/backend/install/context.py b/src/ai/backend/install/context.py index c7b79ddf7c..90fefb3b67 100644 --- a/src/ai/backend/install/context.py +++ b/src/ai/backend/install/context.py @@ -628,8 +628,8 @@ async def configure_client(self) -> None: file=fp, ) print("export BACKEND_ENDPOINT_TYPE=api", file=fp) - print(f"export BACKEND_ACCESS_KEY={keypair["access_key"]}", file=fp) - print(f"export BACKEND_SECRET_KEY={keypair["secret_key"]}", file=fp) + print(f"export BACKEND_ACCESS_KEY={keypair['access_key']}", file=fp) + print(f"export BACKEND_SECRET_KEY={keypair['secret_key']}", file=fp) with self.resource_path("ai.backend.install.fixtures", "example-users.json") as user_path: current_shell = os.environ.get("SHELL", "sh") user_data = json.loads(Path(user_path).read_bytes()) diff --git a/src/ai/backend/kernel/base.py b/src/ai/backend/kernel/base.py index 762f49582d..c60a187449 100644 --- a/src/ai/backend/kernel/base.py +++ b/src/ai/backend/kernel/base.py @@ -698,7 +698,7 @@ async def start_model_service(self, model_info): if model_service_info is None: result = {"status": "failed", "error": "service info not provided"} return - service_name = f"{model_info["name"]}-{model_service_info["port"]}" + service_name = f"{model_info['name']}-{model_service_info['port']}" self.service_parser.add_model_service(service_name, model_service_info) service_info = { "name": service_name, @@ -734,7 +734,7 @@ async def start_model_service(self, model_info): async def check_model_health(self, model_name, model_service_info): health_check_info = model_service_info.get("health_check") health_check_endpoint = ( - f"http://localhost:{model_service_info["port"]}{health_check_info["path"]}" + f"http://localhost:{model_service_info['port']}{health_check_info['path']}" ) retries = 0 current_health_status = HealthStatus.UNDETERMINED @@ -885,7 +885,7 @@ async def _start_service( await terminate_and_wait(proc, timeout=10.0) self.services_running.pop(service_info["name"], None) error_reason = ( - f"opening the service port timed out: {service_info["name"]}" + f"opening the service port timed out: {service_info['name']}" ) else: error_reason = "TimeoutError (unknown)" diff --git a/src/ai/backend/kernel/intrinsic.py b/src/ai/backend/kernel/intrinsic.py index 1b6cea0cb6..806aec1f7b 100644 --- a/src/ai/backend/kernel/intrinsic.py +++ b/src/ai/backend/kernel/intrinsic.py @@ -36,7 +36,7 @@ async def init_sshd_service(child_env): ) stdout, stderr = await proc.communicate() if proc.returncode != 0: - raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}") + raise 
RuntimeError(f"sshd init error: {stderr.decode('utf8')}") pub_key = stdout.splitlines()[1] auth_path.write_bytes(pub_key) auth_path.chmod(0o600) @@ -57,7 +57,7 @@ async def init_sshd_service(child_env): ) stdout, stderr = await proc.communicate() if proc.returncode != 0: - raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}") + raise RuntimeError(f"sshd init error: {stderr.decode('utf8')}") else: try: if (auth_path.parent.stat().st_mode & 0o077) != 0: @@ -83,7 +83,7 @@ async def init_sshd_service(child_env): ) stdout, stderr = await proc.communicate() if proc.returncode != 0: - raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}") + raise RuntimeError(f"sshd init error: {stderr.decode('utf8')}") cluster_privkey_src_path = Path("/home/config/ssh/id_cluster") cluster_ssh_port_mapping_path = Path("/home/config/ssh/port-mapping.json") diff --git a/src/ai/backend/kernel/service.py b/src/ai/backend/kernel/service.py index dd3688a4b7..2405d5be62 100644 --- a/src/ai/backend/kernel/service.py +++ b/src/ai/backend/kernel/service.py @@ -110,7 +110,7 @@ async def start_service( action_impl = getattr(service_actions, action["action"]) except AttributeError: raise InvalidServiceDefinition( - f"Service-def for {service_name} used invalid action: {action["action"]}" + f"Service-def for {service_name} used invalid action: {action['action']}" ) ret = await action_impl(self.variables, **action["args"]) if (ref := action.get("ref")) is not None: diff --git a/src/ai/backend/manager/api/auth.py b/src/ai/backend/manager/api/auth.py index 094a364942..434b9ed9f0 100644 --- a/src/ai/backend/manager/api/auth.py +++ b/src/ai/backend/manager/api/auth.py @@ -350,9 +350,9 @@ async def sign_request(sign_method: str, request: web.Request, secret_key: str) try: mac_type, hash_type = map(lambda s: s.lower(), sign_method.split("-")) assert mac_type == "hmac", "Unsupported request signing method (MAC type)" - assert ( - hash_type in hashlib.algorithms_guaranteed - ), "Unsupported request signing method (hash type)" + assert hash_type in hashlib.algorithms_guaranteed, ( + "Unsupported request signing method (hash type)" + ) new_api_version = request.headers.get("X-BackendAI-Version") legacy_api_version = request.headers.get("X-Sorna-Version") @@ -1112,8 +1112,8 @@ async def generate_ssh_keypair(request: web.Request) -> web.Response: async def upload_ssh_keypair(request: web.Request, params: Any) -> web.Response: domain_name = request["user"]["domain_name"] access_key = request["keypair"]["access_key"] - pubkey = f"{params["pubkey"].rstrip()}\n" - privkey = f"{params["privkey"].rstrip()}\n" + pubkey = f"{params['pubkey'].rstrip()}\n" + privkey = f"{params['privkey'].rstrip()}\n" log_fmt = "AUTH.SAVE_SSH_KEYPAIR(d:{}, ak:{})" log_args = (domain_name, access_key) log.info(log_fmt, *log_args) diff --git a/src/ai/backend/manager/api/manager.py b/src/ai/backend/manager/api/manager.py index 3454397bdb..0cdc3afcc5 100644 --- a/src/ai/backend/manager/api/manager.py +++ b/src/ai/backend/manager/api/manager.py @@ -252,7 +252,7 @@ async def perform_scheduler_ops(request: web.Request, params: Any) -> web.Respon args = iv_scheduler_ops_args[params["op"]].check(params["args"]) except t.DataError as e: raise InvalidAPIParameters( - f"Input validation failed for args with {params["op"]}", + f"Input validation failed for args with {params['op']}", extra_data=e.as_dict(), ) if params["op"] in (SchedulerOps.INCLUDE_AGENTS, SchedulerOps.EXCLUDE_AGENTS): diff --git a/src/ai/backend/manager/api/resource.py 
b/src/ai/backend/manager/api/resource.py index bc388a739b..4489d28454 100644 --- a/src/ai/backend/manager/api/resource.py +++ b/src/ai/backend/manager/api/resource.py @@ -162,11 +162,11 @@ async def check_presets(request: web.Request, params: Any) -> web.Response: result = await conn.execute(query) row = result.first() if row is None: - raise InvalidAPIParameters(f"Unknown project (name: {params["group"]})") + raise InvalidAPIParameters(f"Unknown project (name: {params['group']})") group_id = row["id"] group_resource_slots = row["total_resource_slots"] if group_id is None: - raise InvalidAPIParameters(f"Unknown project (name: {params["group"]})") + raise InvalidAPIParameters(f"Unknown project (name: {params['group']})") group_resource_policy = { "total_resource_slots": group_resource_slots, "default_for_unspecified": DefaultForUnspecified.UNLIMITED, diff --git a/src/ai/backend/manager/api/session.py b/src/ai/backend/manager/api/session.py index 814dc01fdc..d119ce39e8 100644 --- a/src/ai/backend/manager/api/session.py +++ b/src/ai/backend/manager/api/session.py @@ -438,7 +438,7 @@ async def _create(request: web.Request, params: dict[str, Any]) -> web.Response: ) return web.json_response(resp, status=201) except UnknownImageReference: - raise UnknownImageReferenceError(f"Unknown image reference: {params["image"]}") + raise UnknownImageReferenceError(f"Unknown image reference: {params['image']}") except BackendError: log.exception("GET_OR_CREATE: exception") raise @@ -618,7 +618,7 @@ async def create_from_template(request: web.Request, params: dict[str, Any]) -> cmd_builder = "git clone " if credential := git.get("credential"): proto, url = git["repository"].split("://") - cmd_builder += f'{proto}://{credential["username"]}:{credential["password"]}@{url}' + cmd_builder += f"{proto}://{credential['username']}:{credential['password']}@{url}" else: cmd_builder += git["repository"] if branch := git.get("branch"): @@ -679,7 +679,7 @@ async def create_from_template(request: web.Request, params: dict[str, Any]) -> async def create_from_params(request: web.Request, params: dict[str, Any]) -> web.Response: if params["session_name"] in ["from-template"]: raise InvalidAPIParameters( - f'Requested session ID {params["session_name"]} is reserved word' + f"Requested session ID {params['session_name']} is reserved word" ) api_version = request["api_version"] if 8 <= api_version[0]: @@ -805,7 +805,7 @@ async def create_cluster(request: web.Request, params: dict[str, Any]) -> web.Re log.exception("GET_OR_CREATE: exception") raise except UnknownImageReference: - raise UnknownImageReferenceError(f"Unknown image reference: {params["image"]}") + raise UnknownImageReferenceError(f"Unknown image reference: {params['image']}") except Exception: await root_ctx.error_monitor.capture_exception() log.exception("GET_OR_CREATE: unexpected error!") @@ -892,7 +892,7 @@ async def start_service(request: web.Request, params: Mapping[str, Any]) -> web. hport_idx = sport["container_ports"].index(params["port"]) except ValueError: raise InvalidAPIParameters( - f"Service {service} does not open the port number {params["port"]}." + f"Service {service} does not open the port number {params['port']}." 
) host_port = sport["host_ports"][hport_idx] else: @@ -1245,7 +1245,7 @@ async def _commit_and_upload(reporter: ProgressReporter) -> None: new_name = base_image_ref.name new_canonical = ( - f"{registry_hostname}/{registry_project}/{new_name}:{"-".join(filtered_tag_set)}" + f"{registry_hostname}/{registry_project}/{new_name}:{'-'.join(filtered_tag_set)}" ) async with root_ctx.db.begin_readonly_session() as sess: @@ -1295,7 +1295,7 @@ async def _commit_and_upload(reporter: ProgressReporter) -> None: else: customized_image_id = str(uuid.uuid4()) - new_canonical += f"-customized_{customized_image_id.replace("-", "")}" + new_canonical += f"-customized_{customized_image_id.replace('-', '')}" new_image_ref = ImageRef.from_image_str( new_canonical, None, diff --git a/src/ai/backend/manager/api/stream.py b/src/ai/backend/manager/api/stream.py index 151622d0d9..036c18bf8b 100644 --- a/src/ai/backend/manager/api/stream.py +++ b/src/ai/backend/manager/api/stream.py @@ -170,7 +170,7 @@ async def stream_stdin(): ) run_id = secrets.token_hex(8) if data["type"] == "resize": - code = f"%resize {data["rows"]} {data["cols"]}" + code = f"%resize {data['rows']} {data['cols']}" await root_ctx.registry.execute( session, api_version, @@ -470,7 +470,7 @@ async def stream_proxy( hport_idx = sport["container_ports"].index(params["port"]) except ValueError: raise InvalidAPIParameters( - f"Service {service} does not open the port number {params["port"]}." + f"Service {service} does not open the port number {params['port']}." ) host_port = sport["host_ports"][hport_idx] else: # using the default (primary) port of the app @@ -500,7 +500,7 @@ async def stream_proxy( elif sport["protocol"] == "preopen": proxy_cls = TCPProxy else: - raise InvalidAPIParameters(f"Unsupported service protocol: {sport["protocol"]}") + raise InvalidAPIParameters(f"Unsupported service protocol: {sport['protocol']}") redis_live = root_ctx.redis_live conn_tracker_key = f"session.{kernel_id}.active_app_connections" diff --git a/src/ai/backend/manager/api/utils.py b/src/ai/backend/manager/api/utils.py index 5c2b1294ba..73b27eae4b 100644 --- a/src/ai/backend/manager/api/utils.py +++ b/src/ai/backend/manager/api/utils.py @@ -308,10 +308,10 @@ async def wrapped( if loc := first_error["loc"]: metadata["loc"] = loc[0] metadata_formatted_items = [ - f"type={first_error["type"]}", # format as symbol + f"type={first_error['type']}", # format as symbol *(f"{k}={v!r}" for k, v in metadata.items()), ] - msg = f"{first_error["msg"]} [{", ".join(metadata_formatted_items)}]" + msg = f"{first_error['msg']} [{', '.join(metadata_formatted_items)}]" # To reuse the json serialization provided by pydantic, we call ex.json() and re-parse it. 
raise InvalidAPIParameters(msg, extra_data=json.loads(ex.json())) result = await handler(request, checked_params, *args, **kwargs) diff --git a/src/ai/backend/manager/api/vfolder.py b/src/ai/backend/manager/api/vfolder.py index 52da5605ce..8d3a864012 100644 --- a/src/ai/backend/manager/api/vfolder.py +++ b/src/ai/backend/manager/api/vfolder.py @@ -394,7 +394,7 @@ async def create(request: web.Request, params: Any) -> web.Response: allowed_vfolder_types = await root_ctx.shared_config.get_vfolder_types() if not verify_vfolder_name(params["name"]): - raise InvalidAPIParameters(f'{params["name"]} is reserved for internal operations.') + raise InvalidAPIParameters(f"{params['name']} is reserved for internal operations.") if params["name"].startswith(".") and params["name"] != ".local": if params["group"] is not None: raise InvalidAPIParameters("dot-prefixed vfolders cannot be a group folder.") @@ -815,7 +815,7 @@ async def list_hosts(request: web.Request, params: Any) -> web.Response: ) allowed_hosts = allowed_hosts | allowed_hosts_by_group all_volumes = await root_ctx.storage_manager.get_all_volumes() - all_hosts = {f"{proxy_name}:{volume_data["name"]}" for proxy_name, volume_data in all_volumes} + all_hosts = {f"{proxy_name}:{volume_data['name']}" for proxy_name, volume_data in all_volumes} allowed_hosts = VFolderHostPermissionMap({ host: perms for host, perms in allowed_hosts.items() if host in all_hosts }) @@ -824,7 +824,7 @@ async def list_hosts(request: web.Request, params: Any) -> web.Response: default_host = None volume_info = { - f"{proxy_name}:{volume_data["name"]}": { + f"{proxy_name}:{volume_data['name']}": { "backend": volume_data["backend"], "capabilities": volume_data["capabilities"], "usage": await fetch_exposed_volume_fields( @@ -838,7 +838,7 @@ async def list_hosts(request: web.Request, params: Any) -> web.Response: ), } for proxy_name, volume_data in all_volumes - if f"{proxy_name}:{volume_data["name"]}" in allowed_hosts + if f"{proxy_name}:{volume_data['name']}" in allowed_hosts } resp = { @@ -860,7 +860,7 @@ async def list_all_hosts(request: web.Request) -> web.Response: access_key, ) all_volumes = await root_ctx.storage_manager.get_all_volumes() - all_hosts = {f"{proxy_name}:{volume_data["name"]}" for proxy_name, volume_data in all_volumes} + all_hosts = {f"{proxy_name}:{volume_data['name']}" for proxy_name, volume_data in all_volumes} default_host = await root_ctx.shared_config.get_raw("volumes/default_host") if default_host not in all_hosts: default_host = None @@ -2116,7 +2116,7 @@ async def share(request: web.Request, params: Any, row: VFolderRow) -> web.Respo users_not_invfolder_group = list(set(params["emails"]) - set(emails_to_share)) raise ObjectNotFound( "Some users do not belong to folder's group:" - f" {",".join(users_not_invfolder_group)}", + f" {','.join(users_not_invfolder_group)}", object_name="user", ) @@ -2749,7 +2749,7 @@ async def clone(request: web.Request, params: Any, row: VFolderRow) -> web.Respo allowed_vfolder_types = await root_ctx.shared_config.get_vfolder_types() if not verify_vfolder_name(params["target_name"]): - raise InvalidAPIParameters(f'{params["target_name"]} is reserved for internal operations.') + raise InvalidAPIParameters(f"{params['target_name']} is reserved for internal operations.") if source_proxy_name != target_proxy_name: raise InvalidAPIParameters("proxy name of source and target vfolders must be equal.") @@ -3162,7 +3162,7 @@ async def list_mounts(request: web.Request) -> web.Response: all_volumes = [*await 
root_ctx.storage_manager.get_all_volumes()] all_mounts = [volume_data["path"] for proxy_name, volume_data in all_volumes] all_vfolder_hosts = [ - f"{proxy_name}:{volume_data["name"]}" for proxy_name, volume_data in all_volumes + f"{proxy_name}:{volume_data['name']}" for proxy_name, volume_data in all_volumes ] resp: MutableMapping[str, Any] = { "manager": { diff --git a/src/ai/backend/manager/cli/__main__.py b/src/ai/backend/manager/cli/__main__.py index f5fd2dc37e..260faa0665 100644 --- a/src/ai/backend/manager/cli/__main__.py +++ b/src/ai/backend/manager/cli/__main__.py @@ -129,8 +129,8 @@ def dbshell(cli_ctx: CLIContext, container_name, psql_help, psql_args): cmd = [ "psql", ( - f"postgres://{local_config["db"]["user"]}:{local_config["db"]["password"]}" - f"@{local_config["db"]["addr"]}/{local_config["db"]["name"]}" + f"postgres://{local_config['db']['user']}:{local_config['db']['password']}" + f"@{local_config['db']['addr']}/{local_config['db']['name']}" ), *psql_args, ] @@ -198,8 +198,8 @@ def generate_rpc_keypair(cli_ctx: CLIContext, dst_dir: pathlib.Path, name: str) public_key_path, secret_key_path = create_certificates(dst_dir, name) public_key, secret_key = load_certificate(secret_key_path) assert secret_key is not None - print(f"Public Key: {public_key.decode("ascii")} (stored at {public_key_path})") - print(f"Secret Key: {secret_key.decode("ascii")} (stored at {secret_key_path})") + print(f"Public Key: {public_key.decode('ascii')} (stored at {public_key_path})") + print(f"Secret Key: {secret_key.decode('ascii')} (stored at {secret_key_path})") @main.command() diff --git a/src/ai/backend/manager/cli/image_impl.py b/src/ai/backend/manager/cli/image_impl.py index 7268d53f71..1e04ee6b33 100644 --- a/src/ai/backend/manager/cli/image_impl.py +++ b/src/ai/backend/manager/cli/image_impl.py @@ -206,7 +206,7 @@ async def validate_image_alias(cli_ctx, alias: str) -> None: for key, value in validate_image_labels(image_row.labels).items(): print(f"{key:<40}: ", end="") if isinstance(value, list): - value = f'[{", ".join(value)}]' + value = f"[{', '.join(value)}]" print(value) except UnknownImageReference: @@ -236,7 +236,7 @@ async def validate_image_canonical( for key, value in validate_image_labels(image_row.labels).items(): print(f"{key:<40}: ", end="") if isinstance(value, list): - value = f'{", ".join(value)}' + value = f"{', '.join(value)}" print(value) else: rows = await session.scalars(sa.select(ImageRow).where(ImageRow.name == canonical)) @@ -246,11 +246,11 @@ async def validate_image_canonical( for i, image_row in enumerate(image_rows): if i > 0: print("-" * 50) - print(f"{"architecture":<40}: {image_row.architecture}") + print(f"{'architecture':<40}: {image_row.architecture}") for key, value in validate_image_labels(image_row.labels).items(): print(f"{key:<40}: ", end="") if isinstance(value, list): - value = f'{", ".join(value)}' + value = f"{', '.join(value)}" print(value) except UnknownImageReference as e: diff --git a/src/ai/backend/manager/container_registry/aws_ecr.py b/src/ai/backend/manager/container_registry/aws_ecr.py index a29d5aefaf..25fee9a50d 100644 --- a/src/ai/backend/manager/container_registry/aws_ecr.py +++ b/src/ai/backend/manager/container_registry/aws_ecr.py @@ -46,7 +46,7 @@ async def fetch_repositories( yield repo["repositoryName"] case "ecr-public": registry_alias = (repo["repositoryUri"].split("/"))[1] - yield f"{registry_alias}/{repo["repositoryName"]}" + yield f"{registry_alias}/{repo['repositoryName']}" case _: raise ValueError(f"Unknown registry type: 
{type_}") diff --git a/src/ai/backend/manager/container_registry/base.py b/src/ai/backend/manager/container_registry/base.py index fa613b00b4..023c7b667c 100644 --- a/src/ai/backend/manager/container_registry/base.py +++ b/src/ai/backend/manager/container_registry/base.py @@ -194,7 +194,7 @@ async def commit_rescan_result(self) -> None: resources=update["resources"], ) ) - progress_msg = f"Updated image - {parsed_img.canonical}/{image_identifier.architecture} ({update["config_digest"]})" + progress_msg = f"Updated image - {parsed_img.canonical}/{image_identifier.architecture} ({update['config_digest']})" log.info(progress_msg) break @@ -334,14 +334,14 @@ async def _read_manifest_list( """ manifests = {} for manifest in manifest_list: - platform_arg = f"{manifest["platform"]["os"]}/{manifest["platform"]["architecture"]}" + platform_arg = f"{manifest['platform']['os']}/{manifest['platform']['architecture']}" if variant := manifest["platform"].get("variant", None): platform_arg += f"/{variant}" architecture = manifest["platform"]["architecture"] architecture = arch_name_aliases.get(architecture, architecture) async with sess.get( - self.registry_url / f"v2/{image}/manifests/{manifest["digest"]}", + self.registry_url / f"v2/{image}/manifests/{manifest['digest']}", **rqst_args, ) as resp: manifest_info = await resp.json() @@ -566,7 +566,7 @@ async def _read_manifest( architecture, manifest["digest"], ) - progress_msg = f"Updated {image}:{tag}/{architecture} ({manifest["digest"]})" + progress_msg = f"Updated {image}:{tag}/{architecture} ({manifest['digest']})" if (reporter := progress_reporter.get()) is not None: await reporter.update(1, message=progress_msg) diff --git a/src/ai/backend/manager/container_registry/docker.py b/src/ai/backend/manager/container_registry/docker.py index e64536b6b4..b391178c07 100644 --- a/src/ai/backend/manager/container_registry/docker.py +++ b/src/ai/backend/manager/container_registry/docker.py @@ -45,7 +45,7 @@ async def fetch_repositories_legacy( # skip legacy images if item["name"].startswith("kernel-"): continue - yield f"{username}/{item["name"]}" + yield f"{username}/{item['name']}" else: log.error( "Failed to fetch repository list from {0} (status={1})", diff --git a/src/ai/backend/manager/container_registry/github.py b/src/ai/backend/manager/container_registry/github.py index 08b4216972..0fcebf1365 100644 --- a/src/ai/backend/manager/container_registry/github.py +++ b/src/ai/backend/manager/container_registry/github.py @@ -41,7 +41,7 @@ async def fetch_repositories( if response.status == 200: data = await response.json() for repo in data: - yield f"{username}/{repo["name"]}" + yield f"{username}/{repo['name']}" if "next" in response.links: page += 1 else: diff --git a/src/ai/backend/manager/models/alembic/versions/b6b884fbae1f_add_session_table.py b/src/ai/backend/manager/models/alembic/versions/b6b884fbae1f_add_session_table.py index 938e08777d..84c6c5e8ca 100644 --- a/src/ai/backend/manager/models/alembic/versions/b6b884fbae1f_add_session_table.py +++ b/src/ai/backend/manager/models/alembic/versions/b6b884fbae1f_add_session_table.py @@ -40,7 +40,7 @@ def default_hostname(context) -> str: params = context.get_current_parameters() - return f"{params["cluster_role"]}{params["cluster_idx"]}" + return f"{params['cluster_role']}{params['cluster_idx']}" def upgrade() -> None: diff --git a/src/ai/backend/manager/models/dotfile.py b/src/ai/backend/manager/models/dotfile.py index 2f7a1f59f7..15b023c983 100644 --- a/src/ai/backend/manager/models/dotfile.py +++ 
b/src/ai/backend/manager/models/dotfile.py @@ -77,7 +77,7 @@ async def prepare_dotfiles( if dotfile_path in vfolder_kernel_paths: raise BackendError( "There is a kernel-side path from vfolders that conflicts with " - f"a dotfile '{dotfile["path"]}'.", + f"a dotfile '{dotfile['path']}'.", ) return internal_data diff --git a/src/ai/backend/manager/models/endpoint.py b/src/ai/backend/manager/models/endpoint.py index 63694a6ef0..70e96a0a8e 100644 --- a/src/ai/backend/manager/models/endpoint.py +++ b/src/ai/backend/manager/models/endpoint.py @@ -1111,10 +1111,10 @@ class ExtraMountInput(graphene.InputObjectType): vfolder_id = graphene.String() mount_destination = graphene.String() type = graphene.String( - description=f"Added in 24.03.4. Set bind type of this mount. Shoud be one of ({",".join([type_.value for type_ in MountTypes])}). Default is 'bind'." + description=f"Added in 24.03.4. Set bind type of this mount. Shoud be one of ({','.join([type_.value for type_ in MountTypes])}). Default is 'bind'." ) permission = graphene.String( - description=f"Added in 24.03.4. Set permission of this mount. Should be one of ({",".join([perm.value for perm in MountPermission])}). Default is null" + description=f"Added in 24.03.4. Set permission of this mount. Should be one of ({','.join([perm.value for perm in MountPermission])}). Default is null" ) diff --git a/src/ai/backend/manager/models/gql.py b/src/ai/backend/manager/models/gql.py index 81669804f1..ba3a84fa15 100644 --- a/src/ai/backend/manager/models/gql.py +++ b/src/ai/backend/manager/models/gql.py @@ -477,7 +477,7 @@ class Queries(graphene.ObjectType): graphene.String, default_value=[ProjectType.GENERAL.name], description=( - f"Added in 24.03.0. Available values: {", ".join([p.name for p in ProjectType])}" + f"Added in 24.03.0. Available values: {', '.join([p.name for p in ProjectType])}" ), ), ) @@ -500,13 +500,13 @@ class Queries(graphene.ObjectType): load_filters=graphene.List( graphene.String, default_value=None, - description=f"Added in 24.03.8. Allowed values are: [{", ".join([f.value for f in PublicImageLoadFilter])}]. When superuser queries with `customized` option set the resolver will return every customized images (including those not owned by callee). To resolve images owned by user only call `customized_images`.", + description=f"Added in 24.03.8. Allowed values are: [{', '.join([f.value for f in PublicImageLoadFilter])}]. When superuser queries with `customized` option set the resolver will return every customized images (including those not owned by callee). To resolve images owned by user only call `customized_images`.", ), image_filters=graphene.List( graphene.String, default_value=None, deprecation_reason="Deprecated since 24.03.8. Use `load_filters` instead.", - description=f"Added in 24.03.4. Allowed values are: [{", ".join([f.value for f in PublicImageLoadFilter])}]. When superuser queries with `customized` option set the resolver will return every customized images (including those not owned by caller). To list the owned images only call `customized_images`.", + description=f"Added in 24.03.4. Allowed values are: [{', '.join([f.value for f in PublicImageLoadFilter])}]. When superuser queries with `customized` option set the resolver will return every customized images (including those not owned by caller). 
To list the owned images only call `customized_images`.", ), ) @@ -2491,7 +2491,7 @@ async def resolve_quota_scope( qsid = QuotaScopeID.parse(quota_scope_id) volumes_by_host = await graph_ctx.storage_manager.get_all_volumes() for host, volume in volumes_by_host: - if f"{host}:{volume["name"]}" == storage_host_name: + if f"{host}:{volume['name']}" == storage_host_name: break else: raise ValueError(f"storage volume {storage_host_name} does not exist") diff --git a/src/ai/backend/manager/models/gql_models/group.py b/src/ai/backend/manager/models/gql_models/group.py index 4bd98da161..d4d5fab7fb 100644 --- a/src/ai/backend/manager/models/gql_models/group.py +++ b/src/ai/backend/manager/models/gql_models/group.py @@ -60,7 +60,7 @@ class GroupInput(graphene.InputObjectType): required=False, default_value="GENERAL", description=( - f"Added in 24.03.0. Available values: {", ".join([p.name for p in ProjectType])}" + f"Added in 24.03.0. Available values: {', '.join([p.name for p in ProjectType])}" ), ) description = graphene.String(required=False, default_value="") diff --git a/src/ai/backend/manager/models/gql_models/image.py b/src/ai/backend/manager/models/gql_models/image.py index 57bdeddbcc..1c5b9aade1 100644 --- a/src/ai/backend/manager/models/gql_models/image.py +++ b/src/ai/backend/manager/models/gql_models/image.py @@ -307,7 +307,7 @@ def matches_filter( ): return False if ImageLoadFilter.CUSTOMIZED in load_filters: - if label.value == f"user:{ctx.user["uuid"]}": + if label.value == f"user:{ctx.user['uuid']}": is_valid = True else: return False @@ -503,7 +503,7 @@ async def mutate( ) if ( not customized_image_owner - or customized_image_owner != f"user:{ctx.user["uuid"]}" + or customized_image_owner != f"user:{ctx.user['uuid']}" ): return ForgetImageById(ok=False, msg="Forbidden") await session.delete(image_row) @@ -550,7 +550,7 @@ async def mutate( ) if ( not customized_image_owner - or customized_image_owner != f"user:{ctx.user["uuid"]}" + or customized_image_owner != f"user:{ctx.user['uuid']}" ): return ForgetImage(ok=False, msg="Forbidden") await session.delete(image_row) @@ -604,7 +604,7 @@ async def mutate( ) if ( not customized_image_owner - or customized_image_owner != f"user:{ctx.user["uuid"]}" + or customized_image_owner != f"user:{ctx.user['uuid']}" ): return UntagImageFromRegistry(ok=False, msg="Forbidden") diff --git a/src/ai/backend/manager/models/gql_models/session.py b/src/ai/backend/manager/models/gql_models/session.py index 95edc71938..14e8cb9134 100644 --- a/src/ai/backend/manager/models/gql_models/session.py +++ b/src/ai/backend/manager/models/gql_models/session.py @@ -551,7 +551,7 @@ async def mutate_and_get_payload( if "priority" in input and input["priority"] is not graphql.Undefined: if not (SESSION_PRIORITY_MIN <= input["priority"] <= SESSION_PRIORITY_MAX): raise ValueError( - f"The priority value {input["priority"]!r} is out of range: " + f"The priority value {input['priority']!r} is out of range: " f"[{SESSION_PRIORITY_MIN}, {SESSION_PRIORITY_MAX}]." ) diff --git a/src/ai/backend/manager/models/group.py b/src/ai/backend/manager/models/group.py index 7c91c946b1..86600eacce 100644 --- a/src/ai/backend/manager/models/group.py +++ b/src/ai/backend/manager/models/group.py @@ -546,7 +546,7 @@ class GroupInput(graphene.InputObjectType): required=False, default_value="GENERAL", description=( - f"Added in 24.03.0. Available values: {", ".join([p.name for p in ProjectType])}" + f"Added in 24.03.0. 
Available values: {', '.join([p.name for p in ProjectType])}" ), ) description = graphene.String(required=False, default_value="") diff --git a/src/ai/backend/manager/models/kernel.py b/src/ai/backend/manager/models/kernel.py index e54f4827fd..623f12c70a 100644 --- a/src/ai/backend/manager/models/kernel.py +++ b/src/ai/backend/manager/models/kernel.py @@ -202,7 +202,7 @@ async def get_user_email( def default_hostname(context) -> str: params = context.get_current_parameters() - return f"{params["cluster_role"]}{params["cluster_idx"]}" + return f"{params['cluster_role']}{params['cluster_idx']}" KERNEL_STATUS_TRANSITION_MAP: Mapping[KernelStatus, set[KernelStatus]] = { @@ -696,9 +696,9 @@ async def set_kernel_status( reason: Optional[str] = None, status_changed_at: Optional[datetime] = None, ) -> None: - assert ( - status != KernelStatus.TERMINATED - ), "TERMINATED status update must be handled in mark_kernel_terminated()" + assert status != KernelStatus.TERMINATED, ( + "TERMINATED status update must be handled in mark_kernel_terminated()" + ) if status_changed_at is None: now = datetime.now(tzutc()) else: diff --git a/src/ai/backend/manager/models/storage.py b/src/ai/backend/manager/models/storage.py index 8a1a7a42e9..fa03091658 100644 --- a/src/ai/backend/manager/models/storage.py +++ b/src/ai/backend/manager/models/storage.py @@ -288,7 +288,7 @@ async def resolve_usage(self, info: graphene.ResolveInfo) -> Mapping[str, Any]: @classmethod def from_info(cls, proxy_name: str, volume_info: VolumeInfo) -> StorageVolume: return cls( - id=f"{proxy_name}:{volume_info["name"]}", + id=f"{proxy_name}:{volume_info['name']}", backend=volume_info["backend"], path=volume_info["path"], fsprefix=volume_info["fsprefix"], diff --git a/src/ai/backend/manager/models/vfolder.py b/src/ai/backend/manager/models/vfolder.py index 878706e88e..44bf09370d 100644 --- a/src/ai/backend/manager/models/vfolder.py +++ b/src/ai/backend/manager/models/vfolder.py @@ -977,7 +977,7 @@ async def prepare_vfolder_mounts( # Normal vfolders kernel_path_raw = requested_vfolder_dstpaths.get(key) if kernel_path_raw is None: - kernel_path = PurePosixPath(f"/home/work/{vfolder["name"]}") + kernel_path = PurePosixPath(f"/home/work/{vfolder['name']}") else: kernel_path = PurePosixPath(kernel_path_raw) if not kernel_path.is_absolute(): @@ -988,7 +988,7 @@ async def prepare_vfolder_mounts( case MountPermission.READ_WRITE | MountPermission.RW_DELETE: if vfolder["permission"] == VFolderPermission.READ_ONLY: raise VFolderPermissionError( - f"VFolder {vfolder_name} is allowed to be accessed in '{vfolder["permission"].value}' mode, " + f"VFolder {vfolder_name} is allowed to be accessed in '{vfolder['permission'].value}' mode, " f"but attempted with '{requested_perm.value}' mode." 
) mount_perm = requested_perm @@ -1072,7 +1072,7 @@ async def _update() -> None: ), } if update_status == VFolderOperationStatus.DELETE_ONGOING: - values["name"] = VFolderRow.name + f"_deleted_{now.strftime("%Y-%m-%dT%H%M%S%z")}" + values["name"] = VFolderRow.name + f"_deleted_{now.strftime('%Y-%m-%dT%H%M%S%z')}" query = sa.update(vfolders).values(**values).where(cond) await db_session.execute(query) diff --git a/src/ai/backend/manager/openapi.py b/src/ai/backend/manager/openapi.py index f4abeaf830..6f059fa2af 100644 --- a/src/ai/backend/manager/openapi.py +++ b/src/ai/backend/manager/openapi.py @@ -278,7 +278,7 @@ def generate_openapi(subapps: list[web.Application], verbose=False) -> dict[str, else: preconds.append( "Manager status required: one of " - f"{", ".join([e.value.upper() for e in manager_status])}" + f"{', '.join([e.value.upper() for e in manager_status])}" ) if preconds: description.append("\n**Preconditions:**") diff --git a/src/ai/backend/manager/registry.py b/src/ai/backend/manager/registry.py index db1437719f..a3489c4247 100644 --- a/src/ai/backend/manager/registry.py +++ b/src/ai/backend/manager/registry.py @@ -788,7 +788,7 @@ async def create_cluster( if credential := git.get("credential"): proto, url = git["repository"].split("://") cmd_builder += ( - f'{proto}://{credential["username"]}:{credential["password"]}@{url}' + f"{proto}://{credential['username']}:{credential['password']}@{url}" ) else: cmd_builder += git["repository"] @@ -968,7 +968,7 @@ async def enqueue_session( if cluster_size > int(resource_policy["max_containers_per_session"]): raise QuotaExceeded( "You cannot create session with more than " - f"{resource_policy["max_containers_per_session"]} containers.", + f"{resource_policy['max_containers_per_session']} containers.", ) async with self.db.begin_readonly() as conn: @@ -1318,7 +1318,7 @@ async def enqueue_session( "cluster_idx": kernel["cluster_idx"], "local_rank": kernel["local_rank"], "cluster_hostname": ( - f"{kernel["cluster_role"]}{kernel["cluster_idx"]}" + f"{kernel['cluster_role']}{kernel['cluster_idx']}" if not kernel["cluster_hostname"] else kernel["cluster_hostname"] ), @@ -1396,7 +1396,7 @@ async def _post_enqueue() -> None: if getattr(e.orig, "pgcode", None) == "23503": match = re.search(r"Key \(agent\)=\((?P[^)]+)\)", repr(e.orig)) if match: - raise InvalidAPIParameters(f"No such agent: {match.group("agent")}") + raise InvalidAPIParameters(f"No such agent: {match.group('agent')}") else: raise InvalidAPIParameters("No such agent") raise diff --git a/src/ai/backend/manager/scheduler/dispatcher.py b/src/ai/backend/manager/scheduler/dispatcher.py index 833c617670..396abb0dd9 100644 --- a/src/ai/backend/manager/scheduler/dispatcher.py +++ b/src/ai/backend/manager/scheduler/dispatcher.py @@ -542,7 +542,7 @@ async def _schedule_in_sgroup( case str(): hook_name = hook_result.src_plugin case list(): - hook_name = f"({", ".join(hook_result.src_plugin)})" + hook_name = f"({', '.join(hook_result.src_plugin)})" case _: hook_name = "" if hook_result.status == PASSED: diff --git a/src/ai/backend/manager/server.py b/src/ai/backend/manager/server.py index 5ab64b3c1e..c695a4e131 100644 --- a/src/ai/backend/manager/server.py +++ b/src/ai/backend/manager/server.py @@ -258,7 +258,7 @@ async def exception_middleware( # NOTE: pydantic.ValidationError is handled in utils.pydantic_params_api_handler() except InvalidArgument as ex: if len(ex.args) > 1: - raise InvalidAPIParameters(f"{ex.args[0]}: {", ".join(map(str, ex.args[1:]))}") + raise 
InvalidAPIParameters(f"{ex.args[0]}: {', '.join(map(str, ex.args[1:]))}") elif len(ex.args) == 1: raise InvalidAPIParameters(ex.args[0]) else: diff --git a/src/ai/backend/storage/api/client.py b/src/ai/backend/storage/api/client.py index f9b526dc23..5fac813379 100644 --- a/src/ai/backend/storage/api/client.py +++ b/src/ai/backend/storage/api/client.py @@ -189,8 +189,8 @@ class Params(TypedDict): hdrs.ACCEPT_RANGES: "bytes", hdrs.CONTENT_LENGTH: str(file_path.stat().st_size), hdrs.LAST_MODIFIED: ( - f'{last_mdt.strftime("%a")}, {last_mdt.day} ' - f'{last_mdt.strftime("%b")} {last_mdt.year} ' + f"{last_mdt.strftime('%a')}, {last_mdt.day} " + f"{last_mdt.strftime('%b')} {last_mdt.year} " f"{last_mdt.hour}:{last_mdt.minute}:{last_mdt.second} GMT" ), }, diff --git a/src/ai/backend/storage/config.py b/src/ai/backend/storage/config.py index 43ac34860e..6bdb2e2bb4 100644 --- a/src/ai/backend/storage/config.py +++ b/src/ai/backend/storage/config.py @@ -147,7 +147,7 @@ def load_shared_config(local_config: dict[str, Any]) -> AsyncEtcd: } scope_prefix_map = { ConfigScopes.GLOBAL: "", - ConfigScopes.NODE: f"nodes/storage/{local_config["storage-proxy"]["node-id"]}", + ConfigScopes.NODE: f"nodes/storage/{local_config['storage-proxy']['node-id']}", } etcd = AsyncEtcd( local_config["etcd"]["addr"], diff --git a/src/ai/backend/storage/migration.py b/src/ai/backend/storage/migration.py index 682896f9f6..bc068531ed 100644 --- a/src/ai/backend/storage/migration.py +++ b/src/ai/backend/storage/migration.py @@ -182,8 +182,8 @@ def scan_vfolders(root: Path, *, depth: int = 0) -> Iterator[Path]: script = ( "#! /bin/sh\n", - *[f"mkdir -p {m["dst_path"].parent}\n" for m in migration_informations], - *[f"mv {m["src_path"]} {m["dst_path"]}\n" for m in migration_informations], + *[f"mkdir -p {m['dst_path'].parent}\n" for m in migration_informations], + *[f"mv {m['src_path']} {m['dst_path']}\n" for m in migration_informations], f"echo 3 > {volume_id}/version.txt\n", ) if outfile == "-": diff --git a/src/ai/backend/storage/netapp/netappclient.py b/src/ai/backend/storage/netapp/netappclient.py index 109d816c5f..005796cc1c 100644 --- a/src/ai/backend/storage/netapp/netappclient.py +++ b/src/ai/backend/storage/netapp/netappclient.py @@ -197,7 +197,7 @@ def check_job_result(result: AsyncJobResult, allowed_codes: Iterable[JobResponse if result["code"] in _allowed_codes: pass else: - raise NetAppClientError(f"{result["state"]} [{result["code"]}] {result["message"]}") + raise NetAppClientError(f"{result['state']} [{result['code']}] {result['message']}") async def get_volume_metadata(self, volume_id: VolumeID) -> Mapping[str, Any]: raise NotImplementedError @@ -497,7 +497,7 @@ async def update_quota_rule( record = await self._find_quota_rule(svm_id, volume_id, qtree_name) async with self.send_request( "patch", - f"/api/storage/quota/rules/{record["uuid"]}", + f"/api/storage/quota/rules/{record['uuid']}", data={ "space": { "hard_limit": config.limit_bytes, @@ -532,7 +532,7 @@ async def delete_quota_rule( record = await self._find_quota_rule(svm_id, volume_id, qtree_name) async with self.send_request( "delete", - f"/api/storage/quota/rules/{record["uuid"]}", + f"/api/storage/quota/rules/{record['uuid']}", ) as resp: data = await resp.json() return await self.wait_job(data["job"]["uuid"]) diff --git a/src/ai/backend/storage/vast/vastdata_client.py b/src/ai/backend/storage/vast/vastdata_client.py index 6104023407..41d2a845d5 100644 --- a/src/ai/backend/storage/vast/vastdata_client.py +++ 
@@ -152,7 +152,7 @@ def __init__(
     def _req_header(self) -> Mapping[str, str]:
         assert self._auth_token is not None
         return {
-            "Authorization": f"Bearer {self._auth_token["access_token"]}",
+            "Authorization": f"Bearer {self._auth_token['access_token']}",
             "Content-Type": "application/json",
         }
diff --git a/src/ai/backend/storage/weka/__init__.py b/src/ai/backend/storage/weka/__init__.py
index c21fece36f..6d4d0be75d 100644
--- a/src/ai/backend/storage/weka/__init__.py
+++ b/src/ai/backend/storage/weka/__init__.py
@@ -132,7 +132,7 @@ async def init(self) -> None:
                 self._fs_uid = fs.uid
                 break
         else:
-            raise WekaInitError(f"FileSystem {self.config["weka_fs_name"]} not found")
+            raise WekaInitError(f"FileSystem {self.config['weka_fs_name']} not found")
         await super().init()
 
     async def create_quota_model(self) -> AbstractQuotaModel:
diff --git a/src/ai/backend/test/cli_integration/admin/test_domain.py b/src/ai/backend/test/cli_integration/admin/test_domain.py
index 47524698e1..65dfc01628 100644
--- a/src/ai/backend/test/cli_integration/admin/test_domain.py
+++ b/src/ai/backend/test/cli_integration/admin/test_domain.py
@@ -57,19 +57,19 @@ def test_add_domain(run_admin: ClientRunnerFunc):
     assert test_domain.get("is_active") is False, "Domain active status mismatch"
     assert test_domain.get("total_resource_slots") == {}, "Domain total resource slots mismatch"
-    assert (
-        vfolder_volume_name in allowed_vfolder_hosts_json
-    ), f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
-    assert (
-        vfolder_volume_name in vfolder_host_perms_obj
-    ), f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    assert vfolder_volume_name in allowed_vfolder_hosts_json, (
+        f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
+    )
+    assert vfolder_volume_name in vfolder_host_perms_obj, (
+        f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    )
     assert set(allowed_vfolder_hosts_json[vfolder_volume_name]) == set(
         vfolder_host_perms_obj[vfolder_volume_name]
     ), "Domain allowed vfolder hosts mismatch"
-    assert test_domain.get("allowed_docker_registries") == [
-        "cr.backend.ai"
-    ], "Domain allowed docker registries mismatch"
+    assert test_domain.get("allowed_docker_registries") == ["cr.backend.ai"], (
+        "Domain allowed docker registries mismatch"
+    )
 
 def test_update_domain(run_admin: ClientRunnerFunc):
@@ -129,19 +129,19 @@ def test_update_domain(run_admin: ClientRunnerFunc):
     assert test_domain.get("is_active") is True, "Domain active status mismatch"
     assert test_domain.get("total_resource_slots") == {}, "Domain total resource slots mismatch"
-    assert (
-        vfolder_volume_name in allowed_vfolder_hosts_json
-    ), f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
-    assert (
-        vfolder_volume_name in vfolder_host_perms_obj
-    ), f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    assert vfolder_volume_name in allowed_vfolder_hosts_json, (
+        f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
+    )
+    assert vfolder_volume_name in vfolder_host_perms_obj, (
+        f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    )
     assert set(allowed_vfolder_hosts_json[vfolder_volume_name]) == set(
         vfolder_host_perms_obj[vfolder_volume_name]
     ), "Domain allowed vfolder hosts mismatch"
-    assert test_domain.get("allowed_docker_registries") == [
-        "cr1.backend.ai"
-    ], "Domain allowed docker registries mismatch"
+    assert test_domain.get("allowed_docker_registries") == ["cr1.backend.ai"], (
+        "Domain allowed docker registries mismatch"
+    )
 
 def test_delete_domain(run_admin: ClientRunnerFunc):
diff --git a/src/ai/backend/test/cli_integration/admin/test_keypair.py b/src/ai/backend/test/cli_integration/admin/test_keypair.py
index c040bb657a..66bce8a0c4 100644
--- a/src/ai/backend/test/cli_integration/admin/test_keypair.py
+++ b/src/ai/backend/test/cli_integration/admin/test_keypair.py
@@ -99,17 +99,17 @@ def test_add_keypair(
     for i, (user, keypair_option) in enumerate(zip(users, keypair_options)):
         keypair = get_keypair_from_list(keypair_list, user.email)
         assert "access_key" in keypair, f"Keypair#{i + 1} doesn't exist"
-        assert (
-            keypair.get("is_active") is keypair_option.is_active
-        ), f"Keypair#{i + 1} is_active mismatch"
-        assert (
-            keypair.get("is_admin") is keypair_option.is_admin
-        ), f"Keypair#{i + 1} is_admin mismatch"
+        assert keypair.get("is_active") is keypair_option.is_active, (
+            f"Keypair#{i + 1} is_active mismatch"
+        )
+        assert keypair.get("is_admin") is keypair_option.is_admin, (
+            f"Keypair#{i + 1} is_admin mismatch"
+        )
         if (rate_limit := keypair_option.rate_limit) is not None:
             assert keypair.get("rate_limit") == rate_limit, f"Keypair#{i + 1} rate_limit mismatch"
-        assert (
-            keypair.get("resource_policy") == keypair_option.resource_policy
-        ), f"Keypair#{i + 1} resource_policy mismatch"
+        assert keypair.get("resource_policy") == keypair_option.resource_policy, (
+            f"Keypair#{i + 1} resource_policy mismatch"
+        )
 
 def test_update_keypair(
@@ -163,18 +163,18 @@ def test_update_keypair(
     for i, (user, new_keypair_option) in enumerate(zip(users, new_keypair_options)):
         updated_keypair = get_keypair_from_list(updated_keypair_list, user.email)
         assert "access_key" in updated_keypair, f"Keypair#{i + 1} doesn't exist"
-        assert (
-            updated_keypair.get("is_active") is new_keypair_option.is_active
-        ), f"Keypair#{i + 1} is_active mismatch"
-        assert (
-            updated_keypair.get("is_admin") is new_keypair_option.is_admin
-        ), f"Keypair#{i + 1} is_admin mismatch"
-        assert (
-            updated_keypair.get("rate_limit") == new_keypair_option.rate_limit
-        ), f"Keypair#{i + 1} rate_limit mismatch"
-        assert (
-            updated_keypair.get("resource_policy") == new_keypair_option.resource_policy
-        ), f"Keypair#{i + 1} resource_policy mismatch"
+        assert updated_keypair.get("is_active") is new_keypair_option.is_active, (
+            f"Keypair#{i + 1} is_active mismatch"
+        )
+        assert updated_keypair.get("is_admin") is new_keypair_option.is_admin, (
+            f"Keypair#{i + 1} is_admin mismatch"
+        )
+        assert updated_keypair.get("rate_limit") == new_keypair_option.rate_limit, (
+            f"Keypair#{i + 1} rate_limit mismatch"
+        )
+        assert updated_keypair.get("resource_policy") == new_keypair_option.resource_policy, (
+            f"Keypair#{i + 1} resource_policy mismatch"
+        )
 
 def test_delete_keypair(run_admin: ClientRunnerFunc, users: Tuple[User]):
diff --git a/src/ai/backend/test/cli_integration/admin/test_keypair_resource_policy.py b/src/ai/backend/test/cli_integration/admin/test_keypair_resource_policy.py
index a9a9122b9d..7e4880046b 100644
--- a/src/ai/backend/test/cli_integration/admin/test_keypair_resource_policy.py
+++ b/src/ai/backend/test/cli_integration/admin/test_keypair_resource_policy.py
@@ -56,25 +56,25 @@ def test_add_keypair_resource_policy(run_admin: ClientRunnerFunc, keypair_resour
     allowed_vfolder_hosts_json = json.loads(allowed_vfolder_hosts_str)
     assert bool(test_krp), "Test keypair resource policy doesn't exist"
-    assert (
-        test_krp.get("total_resource_slots") == "{}"
-    ), "Test keypair resource policy total resource slot mismatch"
-    assert (
-        test_krp.get("max_concurrent_sessions") == 20
-    ), "Test keypair resource policy max concurrent session mismatch"
-    assert (
-        test_krp.get("idle_timeout") == 1200
-    ), "Test keypair resource policy idle timeout mismatch"
-    assert (
-        test_krp.get("max_containers_per_session") == 2
-    ), "Test keypair resouce policy max containers per session mismatch"
-
-    assert (
-        vfolder_volume_name in allowed_vfolder_hosts_json
-    ), f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
-    assert (
-        vfolder_volume_name in vfolder_host_perms_obj
-    ), f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    assert test_krp.get("total_resource_slots") == "{}", (
+        "Test keypair resource policy total resource slot mismatch"
+    )
+    assert test_krp.get("max_concurrent_sessions") == 20, (
+        "Test keypair resource policy max concurrent session mismatch"
+    )
+    assert test_krp.get("idle_timeout") == 1200, (
+        "Test keypair resource policy idle timeout mismatch"
+    )
+    assert test_krp.get("max_containers_per_session") == 2, (
+        "Test keypair resouce policy max containers per session mismatch"
+    )
+
+    assert vfolder_volume_name in allowed_vfolder_hosts_json, (
+        f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
+    )
+    assert vfolder_volume_name in vfolder_host_perms_obj, (
+        f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    )
 
     assert set(allowed_vfolder_hosts_json[vfolder_volume_name]) == set(
         vfolder_host_perms_obj[vfolder_volume_name]
@@ -133,25 +133,25 @@ def test_update_keypair_resource_policy(run_admin: ClientRunnerFunc, keypair_res
     allowed_vfolder_hosts_json = json.loads(allowed_vfolder_hosts_str)
     assert bool(test_krp), "Test keypair resource policy doesn't exist"
-    assert (
-        test_krp.get("total_resource_slots") == "{}"
-    ), "Test keypair resource policy total resource slot mismatch"
-    assert (
-        test_krp.get("max_concurrent_sessions") == 30
-    ), "Test keypair resource policy max concurrent session mismatch"
-    assert (
-        test_krp.get("idle_timeout") == 1800
-    ), "Test keypair resource policy idle timeout mismatch"
-    assert (
-        test_krp.get("max_containers_per_session") == 1
-    ), "Test keypair resouce policy max containers per session mismatch"
-
-    assert (
-        vfolder_volume_name in allowed_vfolder_hosts_json
-    ), f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
-    assert (
-        vfolder_volume_name in vfolder_host_perms_obj
-    ), f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    assert test_krp.get("total_resource_slots") == "{}", (
+        "Test keypair resource policy total resource slot mismatch"
+    )
+    assert test_krp.get("max_concurrent_sessions") == 30, (
+        "Test keypair resource policy max concurrent session mismatch"
+    )
+    assert test_krp.get("idle_timeout") == 1800, (
+        "Test keypair resource policy idle timeout mismatch"
+    )
+    assert test_krp.get("max_containers_per_session") == 1, (
+        "Test keypair resouce policy max containers per session mismatch"
+    )
+
+    assert vfolder_volume_name in allowed_vfolder_hosts_json, (
+        f"allowed_vfolder_hosts_json {vfolder_volume_name} is None"
+    )
+    assert vfolder_volume_name in vfolder_host_perms_obj, (
+        f"vfolder_host_perms_obj {vfolder_volume_name} is None"
+    )
     assert set(allowed_vfolder_hosts_json[vfolder_volume_name]) == set(
         vfolder_host_perms_obj[vfolder_volume_name]
     ), "Test keypair resource policy allowed vfolder hosts mismatch"
diff --git a/src/ai/backend/test/cli_integration/admin/test_scaling_group.py b/src/ai/backend/test/cli_integration/admin/test_scaling_group.py
index e5f16c4f74..a1122a2bc6 100644
--- a/src/ai/backend/test/cli_integration/admin/test_scaling_group.py
+++ b/src/ai/backend/test/cli_integration/admin/test_scaling_group.py
@@ -52,9 +52,9 @@ def test_add_scaling_group(run_admin: ClientRunnerFunc):
     assert isinstance(scaling_group_list, list), "Scaling group info not printed properly"
     test_group = get_scaling_group_from_list(scaling_group_list, "test_group1")
-    assert (
-        test_group.get("description") == "Test scaling group"
-    ), "Scaling group description mismatch"
+    assert test_group.get("description") == "Test scaling group", (
+        "Scaling group description mismatch"
+    )
     assert test_group.get("is_active") is False, "Scaling group active status mismatch"
     assert test_group.get("driver") == "static", "Scaling group driver mismatch"
     assert test_group.get("driver_opts") == {"x": 1}, "Scaling group driver options mismatch"
@@ -104,9 +104,9 @@ def test_update_scaling_group(run_admin: ClientRunnerFunc):
     test_group = get_scaling_group_from_list(scaling_group_list, "test_group1")
     assert bool(test_group), "Test scaling group doesn't exist"
-    assert (
-        test_group.get("description") == "Test scaling group updated"
-    ), "Scaling group description mismatch"
+    assert test_group.get("description") == "Test scaling group updated", (
+        "Scaling group description mismatch"
+    )
     assert test_group.get("is_active") is True, "Scaling group active status mismatch"
     assert test_group.get("driver") == "non-static", "Scaling group driver mismatch"
     assert test_group.get("driver_opts") == {"x": 1}, "Scaling group driver options mismatch"
diff --git a/src/ai/backend/test/cli_integration/admin/test_user.py b/src/ai/backend/test/cli_integration/admin/test_user.py
index c2476eeef6..6ad9e4fb5a 100644
--- a/src/ai/backend/test/cli_integration/admin/test_user.py
+++ b/src/ai/backend/test/cli_integration/admin/test_user.py
@@ -54,9 +54,9 @@ def test_add_user(run_admin: ClientRunnerFunc, users: Tuple[User, ...]):
         assert added_user.get("full_name") == user.full_name, f"Full name mismatch: Account#{i + 1}"
         assert added_user.get("status") == user.status, f"User status mismatch: Account#{i + 1}"
         assert added_user.get("role") == user.role, f"Role mismatch: Account#{i + 1}"
-        assert (
-            added_user.get("need_password_change") is user.need_password_change
-        ), f"Password change status mismatch: Account#{i + 1}"
+        assert added_user.get("need_password_change") is user.need_password_change, (
+            f"Password change status mismatch: Account#{i + 1}"
+        )
 
 def test_update_user(
@@ -122,19 +122,19 @@ def test_update_user(
     for i, updated_user in enumerate(updated_users):
         user_dict: dict = get_user_from_list(updated_user_list, updated_user.username)
         assert bool(user_dict), f"Account not found - Account#{i + 1}"
-        assert (
-            user_dict.get("full_name") == updated_user.full_name
-        ), f"Full name mismatch: Account#{i + 1}"
-        assert (
-            user_dict.get("status") == updated_user.status
-        ), f"User status mismatch: Account#{i + 1}"
+        assert user_dict.get("full_name") == updated_user.full_name, (
+            f"Full name mismatch: Account#{i + 1}"
+        )
+        assert user_dict.get("status") == updated_user.status, (
+            f"User status mismatch: Account#{i + 1}"
+        )
         assert user_dict.get("role") == updated_user.role, f"Role mismatch: Account#{i + 1}"
-        assert (
-            user_dict.get("need_password_change") is updated_user.need_password_change
-        ), f"Password change status mismatch: Account#{i + 1}"
-        assert (
-            user_dict.get("domain_name") == updated_user.domain_name
-        ), f"Domain mismatch: Account#{i + 1}"
+        assert user_dict.get("need_password_change") is updated_user.need_password_change, (
+            f"Password change status mismatch: Account#{i + 1}"
+        )
+        assert user_dict.get("domain_name") == updated_user.domain_name, (
+            f"Domain mismatch: Account#{i + 1}"
+        )
 
 def test_delete_user(run_admin: ClientRunnerFunc, users: Tuple[User, ...]):
diff --git a/src/ai/backend/test/cli_integration/user/test_vfolder.py b/src/ai/backend/test/cli_integration/user/test_vfolder.py
index bad3cc4cee..a6d1315954 100644
--- a/src/ai/backend/test/cli_integration/user/test_vfolder.py
+++ b/src/ai/backend/test/cli_integration/user/test_vfolder.py
@@ -24,15 +24,15 @@ def test_create_vfolder(run_user: ClientRunnerFunc):
     # Create vfolder
     with closing(run_user(["vfolder", "create", "-p", "rw", "test_folder1", "local:volume1"])) as p:
         p.expect(EOF)
-        assert (
-            'Virtual folder "test_folder1" is created' in p.before.decode()
-        ), "Test folder1 not created successfully."
+        assert 'Virtual folder "test_folder1" is created' in p.before.decode(), (
+            "Test folder1 not created successfully."
+        )
 
     with closing(run_user(["vfolder", "create", "-p", "ro", "test_folder2", "local:volume1"])) as p:
         p.expect(EOF)
-        assert (
-            'Virtual folder "test_folder2" is created' in p.before.decode()
-        ), "Test folder2 not created successfully."
+        assert 'Virtual folder "test_folder2" is created' in p.before.decode(), (
+            "Test folder2 not created successfully."
+        )
 
     # Check if vfolder is created
     with closing(run_user(["--output=json", "vfolder", "list"])) as p:
@@ -164,16 +164,16 @@ def test_mkdir_vfolder(run_user: ClientRunnerFunc):
     # Create already existing directory with exist-ok option
     with closing(run_user(["vfolder", "mkdir", "-e", vfolder_name, dir_paths[0]])) as p:
         p.expect(EOF)
-        assert (
-            "Successfully created" in p.before.decode()
-        ), "Exist-ok option does not work properly."
+        assert "Successfully created" in p.before.decode(), (
+            "Exist-ok option does not work properly."
+        )
 
     # Test whether the parent directory is created automatically
     with closing(run_user(["vfolder", "mkdir", "-p", vfolder_name, dir_paths[1]])) as p:
         p.expect(EOF)
-        assert (
-            "Successfully created" in p.before.decode()
-        ), "The parent directory is not created automatically."
+        assert "Successfully created" in p.before.decode(), (
+            "The parent directory is not created automatically."
+        )
 
 @pytest.mark.dependency(
@@ -239,15 +239,15 @@ def test_delete_vfolder_the_same_vfolder_name(
     with closing(run_user(["vfolder", "delete", vfolder_name])) as p:
         p.expect(EOF)
-        assert (
-            "Deleted" in p.before.decode()
-        ), "Test folder created by user not deleted successfully."
+        assert "Deleted" in p.before.decode(), (
+            "Test folder created by user not deleted successfully."
+        )
 
     with closing(run_user2(["vfolder", "delete", vfolder_name])) as p:
         p.expect(EOF)
-        assert (
-            "Deleted" in p.before.decode()
-        ), "Test folder created by user2 not deleted successfully."
+        assert "Deleted" in p.before.decode(), (
+            "Test folder created by user2 not deleted successfully."
+        )
 
 def test_list_vfolder(run_user: ClientRunnerFunc):
diff --git a/src/ai/backend/web/server.py b/src/ai/backend/web/server.py
index 05ec559c05..6e01c430e1 100644
--- a/src/ai/backend/web/server.py
+++ b/src/ai/backend/web/server.py
@@ -307,7 +307,7 @@ async def login_handler(request: web.Request) -> web.Response:
     async def _get_login_history():
         login_history = await request.app["redis"].get(
-            f'login_history_{creds["username"]}',
+            f"login_history_{creds['username']}",
        )
         if not login_history:
             login_history = {
@@ -326,7 +326,7 @@ async def _set_login_history(last_login_attempt, login_fail_count):
         """
         Set login history per email (not in browser session).
         """
-        key = f'login_history_{creds["username"]}'
+        key = f"login_history_{creds['username']}"
         value = json.dumps({
             "last_login_attempt": last_login_attempt,
             "login_fail_count": login_fail_count,
@@ -802,7 +802,7 @@ def main(
     )
     with logger:
         setproctitle(
-            f"backend.ai: webserver {cfg["service"]["ip"]}:{cfg["service"]["port"]}"
+            f"backend.ai: webserver {cfg['service']['ip']}:{cfg['service']['port']}"
         )
         log.info("Backend.AI Web Server {0}", __version__)
         log.info("runtime: {0}", sys.prefix)
diff --git a/src/ai/backend/wsproxy/proxy/backend/http.py b/src/ai/backend/wsproxy/proxy/backend/http.py
index e02710af67..f389d0f11c 100644
--- a/src/ai/backend/wsproxy/proxy/backend/http.py
+++ b/src/ai/backend/wsproxy/proxy/backend/http.py
@@ -110,7 +110,7 @@ async def proxy_http(self, request: web.Request) -> web.StreamResponse:
         headers["forwarded"] = f"host={host};proto={protocol}"
         headers["x-forwarded-host"] = host
         if self.circuit.app == "rstudio":
-            headers["x-rstudio-request"] = f"{protocol}://{host}{request.path or ""}"
+            headers["x-rstudio-request"] = f"{protocol}://{host}{request.path or ''}"
         split = host.split(":")
         if len(split) >= 2:
             headers["x-forwarded-port"] = split[1]
diff --git a/tests/common/redis_helper/docker.py b/tests/common/redis_helper/docker.py
index 772b0d12db..92e288c756 100644
--- a/tests/common/redis_helper/docker.py
+++ b/tests/common/redis_helper/docker.py
@@ -289,7 +289,7 @@ async def make_cluster(self) -> AsyncIterator[RedisClusterInfo]:
                 container["Id"]
             )
             if self.verbose:
-                print(f"--- logs of {container["Id"]} ---")
+                print(f"--- logs of {container['Id']} ---")
             try:
                 p = await simple_run_cmd(["docker", "logs", container["Id"]])
             finally:
diff --git a/tests/common/test_docker.py b/tests/common/test_docker.py
index 022acbb716..6862cd625d 100644
--- a/tests/common/test_docker.py
+++ b/tests/common/test_docker.py
@@ -375,7 +375,7 @@ def test_image_ref_generate_aliases():
     for name, ptags in itertools.product(
         possible_names, itertools.product(*possible_platform_tags)
     ):
-        assert f"{name}:{"-".join(t for t in ptags if t)}" in aliases
+        assert f"{name}:{'-'.join(t for t in ptags if t)}" in aliases
 
 def test_image_ref_generate_aliases_with_accelerator():
@@ -399,7 +399,7 @@ def test_image_ref_generate_aliases_with_accelerator():
     for name, ptags in itertools.product(
         possible_names, itertools.product(*possible_platform_tags)
     ):
-        assert f"{name}:{"-".join(t for t in ptags if t)}" in aliases
+        assert f"{name}:{'-'.join(t for t in ptags if t)}" in aliases
 
 def test_image_ref_generate_aliases_of_names():
diff --git a/tests/manager/conftest.py b/tests/manager/conftest.py
index a474ed0d05..51233ca973 100644
--- a/tests/manager/conftest.py
+++ b/tests/manager/conftest.py
@@ -670,7 +670,7 @@ def create_header(
     ) -> dict[str, str]:
         now = datetime.now(tzutc())
         root_ctx: RootContext = app["_root.context"]
-        hostname = f"127.0.0.1:{root_ctx.local_config["manager"]["service-addr"].port}"
+        hostname = f"127.0.0.1:{root_ctx.local_config['manager']['service-addr'].port}"
         headers = {
             "Date": now.isoformat(),
             "Content-Type": ctype,
@@ -714,7 +714,7 @@ def create_header(
         signature = hmac.new(sign_key, sign_bytes, hash_type).hexdigest()
         headers["Authorization"] = (
             f"BackendAI signMethod=HMAC-{hash_type.upper()}, "
-            + f'credential={keypair["access_key"]}:{signature}'
+            + f"credential={keypair['access_key']}:{signature}"
         )
         return headers
diff --git a/tests/manager/test_agent_selector.py b/tests/manager/test_agent_selector.py
index 6b544d45f9..3cafbb1b8f 100644
--- a/tests/manager/test_agent_selector.py
+++ b/tests/manager/test_agent_selector.py
@@ -170,8 +170,8 @@ async def test_agent_selection_strategy_rr_skip_unacceptable_agents() -> None:
         for ag in agents:
             print(
                 ag.id,
-                f"{ag.occupied_slots["cpu"]}/{ag.available_slots["cpu"]}",
-                f"{ag.occupied_slots["mem"]}/{ag.available_slots["mem"]}",
+                f"{ag.occupied_slots['cpu']}/{ag.available_slots['cpu']}",
+                f"{ag.occupied_slots['mem']}/{ag.available_slots['mem']}",
             )
         # As more sessions have the assigned agents, the remaining capacity diminishes
         # and the range of round-robin also becomes limited.
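A short illustration of the two mechanical rewrites applied throughout the hunks above; this is a minimal sketch, and the dict and messages below are made up for illustration, not taken from the patch. Re-using the same quote character inside an f-string expression only parses on Python 3.12+ (PEP 701), so the inner quotes are flipped to single quotes; and assert messages are moved into trailing parentheses, which keeps the asserted condition itself bare.

params = {"op": "include"}  # hypothetical data, for illustration only

# f"{params["op"]}" parses only on Python 3.12+ (PEP 701); the single-quoted
# form below is accepted by every supported Python version.
msg = f"Input validation failed for args with {params['op']}"

# New assert style: parentheses wrap only the message, never the condition.
# (assert (cond, "msg") would assert a two-element tuple and always pass.)
assert params["op"] == "include", (
    "Unsupported scheduler op"
)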
From ec46ae2361e357ee74f6d3555a42fd93d4014e67 Mon Sep 17 00:00:00 2001
From: Joongi Kim
Date: Thu, 2 Jan 2025 23:27:59 +0900
Subject: [PATCH 7/9] style: Keep multiline formatting for GQL queries

---
 src/ai/backend/client/func/image.py | 105 +++++++++++++++++++---------
 src/ai/backend/client/utils.py      |   7 +-
 2 files changed, 77 insertions(+), 35 deletions(-)

diff --git a/src/ai/backend/client/func/image.py b/src/ai/backend/client/func/image.py
index 6fe2757b13..e045e6a3c4 100644
--- a/src/ai/backend/client/func/image.py
+++ b/src/ai/backend/client/func/image.py
@@ -5,6 +5,7 @@
 
 from ..request import Request
 from ..session import api_session
+from ..utils import dedent as _d
 from .base import BaseFunction, api_function
 
 __all__ = ("Image",)
@@ -37,7 +38,13 @@ async def list(
         """
         Fetches the list of registered images in this cluster.
         """
-        q = "query($is_operation: Boolean) { images(is_operation: $is_operation) { $fields }}"
+        q = _d("""
+            query($is_operation: Boolean) {
+                images(is_operation: $is_operation) {
+                    $fields
+                }
+            }
+        """)
         q = q.replace("$fields", " ".join(f.field_ref for f in fields))
         variables = {
             "is_operation": operation,
@@ -56,13 +63,13 @@ async def get(
         """
         Fetches the information about registered image in this cluster.
         """
-        q = (
-            "query($reference: String!, $architecture: String!) {"
-            " image(reference: $reference, architecture: $architecture) {"
-            " $fields"
-            " }"
-            "}"
-        )
+        q = _d("""
+            query($reference: String!, $architecture: String!) {
+                image(reference: $reference, architecture: $architecture) {
+                    $fields
+                }
+            }
+        """)
         q = q.replace("$fields", " ".join(f.field_ref for f in fields))
         variables = {
             "reference": reference,
@@ -81,7 +88,13 @@ async def get_by_id(
         """
         Fetches the information about registered image in this cluster.
         """
-        q = "query($id: String!) { image(id: $id) { $fields }}"
+        q = _d("""
+            query($id: String!) {
+                image(id: $id) {
+                    $fields
+                }
+            }
+        """)
         q = q.replace("$fields", " ".join(f.field_ref for f in fields))
         variables = {
             "id": id,
@@ -98,7 +111,13 @@ async def list_customized(
         """
         Fetches the list of customized images in this cluster.
         """
-        q = "query { customized_images { $fields }}"
+        q = _d("""
+            query {
+                customized_images {
+                    $fields
+                }
+            }
+        """)
         q = q.replace("$fields", " ".join(f.field_ref for f in fields))
         data = await api_session.get().Admin._query(q, {})
         return data["customized_images"]
@@ -106,13 +125,13 @@ async def list_customized(
     @api_function
     @classmethod
     async def rescan_images(cls, registry: str):
-        q = (
-            "mutation($registry: String) {"
-            " rescan_images(registry:$registry) {"
-            " ok msg task_id"
-            " }"
-            "}"
-        )
+        q = _d("""
+            mutation($registry: String) {
+                rescan_images(registry:$registry) {
+                    ok msg task_id
+                }
+            }
+        """)
         variables = {
             "registry": registry,
         }
@@ -122,7 +141,13 @@ async def rescan_images(cls, registry: str):
     @api_function
     @classmethod
     async def forget_image_by_id(cls, image_id: str):
-        q = "mutation($image_id: String!) { forget_image_by_id(image_id: $image_id) { ok msg }}"
+        q = _d("""
+            mutation($image_id: String!) {
+                forget_image_by_id(image_id: $image_id) {
+                    ok msg
+                }
+            }
+        """)
         variables = {
             "image_id": image_id,
         }
@@ -132,7 +157,13 @@ async def forget_image_by_id(cls, image_id: str):
     @api_function
     @classmethod
     async def untag_image_from_registry(cls, id: str):
-        q = "mutation($id: String!) { untag_image_from_registry(id: $id) { ok msg }}"
+        q = _d("""
+            mutation($id: String!) {
+                untag_image_from_registry(id: $id) {
+                    ok msg
+                }
+            }
+        """)
         variables = {
             "id": id,
         }
@@ -142,13 +173,13 @@ async def untag_image_from_registry(cls, id: str):
     @api_function
     @classmethod
     async def forget_image(cls, reference: str, architecture: str):
-        q = (
-            "mutation($reference: String!, $architecture: String!) {"
-            " forget_image(reference: $reference, architecture: $architecture) {"
-            " ok msg"
-            " }"
-            "}"
-        )
+        q = _d("""
+            mutation($reference: String!, $architecture: String!) {
+                forget_image(reference: $reference, architecture: $architecture) {
+                    ok msg
+                }
+            }
+        """)
         variables = {
             "reference": reference,
             "architecture": architecture,
@@ -164,13 +195,13 @@ async def alias_image(
         target: str,
         arch: Optional[str] = None,
     ) -> dict:
-        q = (
-            "mutation($alias: String!, $target: String!) {"
-            " alias_image(alias: $alias, target: $target) {"
-            " ok msg"
-            " }"
-            "}"
-        )
+        q = _d("""
+            mutation($alias: String!, $target: String!) {
+                alias_image(alias: $alias, target: $target) {
+                    ok msg
+                }
+            }
+        """)
         variables = {
             "alias": alias,
             "target": target,
@@ -183,7 +214,13 @@ async def alias_image(
     @api_function
     @classmethod
     async def dealias_image(cls, alias: str) -> dict:
-        q = "mutation($alias: String!) { dealias_image(alias: $alias) { ok msg }}"
+        q = _d("""
+            mutation($alias: String!) {
+                dealias_image(alias: $alias) {
+                    ok msg
+                }
+            }
+        """)
         variables = {
             "alias": alias,
         }
diff --git a/src/ai/backend/client/utils.py b/src/ai/backend/client/utils.py
index b95fc5c9b8..fac5bc1c75 100644
--- a/src/ai/backend/client/utils.py
+++ b/src/ai/backend/client/utils.py
@@ -1,7 +1,10 @@
 import io
 import os
+import textwrap
 
-from tqdm import tqdm
+
+def dedent(text: str) -> str:
+    return textwrap.dedent(text).strip()
 
 class ProgressReportingReader(io.BufferedReader):
@@ -9,6 +12,8 @@ def __init__(self, file_path, *, tqdm_instance=None):
         super().__init__(open(file_path, "rb"))
         self._filename = os.path.basename(file_path)
         if tqdm_instance is None:
+            from tqdm import tqdm
+
             self._owns_tqdm = True
             self.tqdm = tqdm(
                 unit="bytes",
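The `dedent` helper (aliased as `_d`) introduced by this patch strips the common leading indentation and the surrounding blank lines, so GraphQL literals can be indented to match the Python code while the query sent over the wire starts at column 0. A minimal behavioral sketch, assuming only the helper defined in utils.py above (the query text is illustrative):

import textwrap

def dedent(text: str) -> str:
    # Same shape as the helper added to src/ai/backend/client/utils.py.
    return textwrap.dedent(text).strip()

q = dedent("""
    query($id: String!) {
        image(id: $id) { $fields }
    }
""")
# q now begins with 'query($id: String!) {' at column 0, with the shared
# four-space indent removed and the leading/trailing newlines stripped.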
From 7e95cdbba9340bdf46dc0f1ec743c9397dc0a5b8 Mon Sep 17 00:00:00 2001
From: Joongi Kim
Date: Thu, 2 Jan 2025 23:57:01 +0900
Subject: [PATCH 8/9] style: Make GraphQL literals more readable and easier to write

---
 src/ai/backend/client/func/acl.py             | 15 ++--
 src/ai/backend/client/func/agent.py           | 19 ++---
 src/ai/backend/client/func/domain.py          | 61 +++++++------
 src/ai/backend/client/func/group.py           | 56 +++++-------
 src/ai/backend/client/func/image.py           | 60 ++++++-------
 src/ai/backend/client/func/keypair.py         | 85 ++++++++++++-------
 .../client/func/keypair_resource_policy.py    | 45 ++++++----
 src/ai/backend/client/func/network.py         | 45 ++++++----
 src/ai/backend/client/func/quota_scope.py     | 42 ++++-----
 src/ai/backend/client/func/scaling_group.py   | 72 ++++++----------
 src/ai/backend/client/func/service.py         | 22 +++--
 src/ai/backend/client/func/session.py         |  8 +-
 src/ai/backend/client/func/storage.py         | 19 ++---
 src/ai/backend/client/func/user.py            | 68 ++++++---------
 14 files changed, 289 insertions(+), 328 deletions(-)

diff --git a/src/ai/backend/client/func/acl.py b/src/ai/backend/client/func/acl.py
index 233b0d5004..0058f0a0da 100644
--- a/src/ai/backend/client/func/acl.py
+++ b/src/ai/backend/client/func/acl.py
@@ -1,10 +1,9 @@
-import textwrap
 from typing import Sequence
 
-from ai.backend.client.output.fields import permission_fields
-from ai.backend.client.output.types import FieldSpec
-
+from ..output.fields import permission_fields
+from ..output.types import FieldSpec
 from ..session import api_session
+from ..utils import dedent as _d
 from .base import BaseFunction, api_function
 
 __all__ = ("Permission",)
@@ -24,13 +23,11 @@ async def list(
 
         :param fields: Additional permission query fields to fetch.
""" - query = textwrap.dedent( - """\ + query = _d(""" query { - vfolder_host_permissions {$fields} + vfolder_host_permissions { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) data = await api_session.get().Admin._query(query) return data["vfolder_host_permissions"] diff --git a/src/ai/backend/client/func/agent.py b/src/ai/backend/client/func/agent.py index 68c80df8cd..ecd685bd2f 100644 --- a/src/ai/backend/client/func/agent.py +++ b/src/ai/backend/client/func/agent.py @@ -1,14 +1,13 @@ from __future__ import annotations -import textwrap from typing import Optional, Sequence -from ai.backend.client.output.fields import agent_fields -from ai.backend.client.output.types import FieldSpec, PaginatedResult -from ai.backend.client.pagination import fetch_paginated_result -from ai.backend.client.request import Request -from ai.backend.client.session import api_session - +from ..output.fields import agent_fields +from ..output.types import FieldSpec, PaginatedResult +from ..pagination import fetch_paginated_result +from ..request import Request +from ..session import api_session +from ..utils import dedent as _d from .base import BaseFunction, api_function __all__ = ( @@ -88,13 +87,11 @@ async def detail( agent_id: str, fields: Sequence[FieldSpec] = _default_detail_fields, ) -> Sequence[dict]: - query = textwrap.dedent( - """\ + query = _d(""" query($agent_id: String!) { agent(agent_id: $agent_id) {$fields} } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"agent_id": agent_id} data = await api_session.get().Admin._query(query, variables) diff --git a/src/ai/backend/client/func/domain.py b/src/ai/backend/client/func/domain.py index 1891ec1d28..61c3980aa1 100644 --- a/src/ai/backend/client/func/domain.py +++ b/src/ai/backend/client/func/domain.py @@ -1,4 +1,3 @@ -import textwrap from typing import Any, Iterable, Sequence from ...cli.types import Undefined, undefined @@ -6,6 +5,7 @@ from ..output.types import FieldSpec from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function, resolve_fields __all__ = ("Domain",) @@ -56,13 +56,11 @@ async def list( :param fields: Additional per-domain query fields to fetch. """ - query = textwrap.dedent( - """\ + query = _d(""" query { - domains {$fields} + domains { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) data = await api_session.get().Admin._query(query) return data["domains"] @@ -75,18 +73,16 @@ async def detail( fields: Sequence[FieldSpec] = _default_detail_fields, ) -> dict: """ - Fetch information of a domain with name. + Retrieves the detail of a domain with name. :param name: Name of the domain to fetch. :param fields: Additional per-domain query fields to fetch. """ - query = textwrap.dedent( - """\ + query = _d(""" query($name: String) { - domain(name: $name) {$fields} + domain(name: $name) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"name": name} data = await api_session.get().Admin._query(query, variables) @@ -108,17 +104,16 @@ async def create( ) -> dict: """ Creates a new domain with the given options. + You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!, $input: DomainInput!) 
{ create_domain(name: $name, props: $input) { - ok msg domain {$fields} + ok msg domain { $fields } } } - """ - ) + """) resolved_fields = resolve_fields(fields, domain_fields, (domain_fields["name"],)) query = query.replace("$fields", " ".join(resolved_fields)) inputs = { @@ -152,18 +147,17 @@ async def update( fields: Iterable[FieldSpec | str] | None = None, ) -> dict: """ - Update existing domain. + Updates an existing domain. + You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!, $input: ModifyDomainInput!) { modify_domain(name: $name, props: $input) { ok msg } } - """ - ) + """) inputs: dict[str, Any] = {} set_if_set(inputs, "name", new_name) set_if_set(inputs, "description", description) @@ -183,17 +177,19 @@ async def update( @classmethod async def delete(cls, name: str): """ - Deletes an existing domain. This action only deletes the primary record and might leave behind some associated data or metadata that can be manually cleaned up or ignored. Ideal for removing items that may be re-created or restored. + Deletes an existing domain. + + This action only deletes the primary record and might leave behind some associated data or + metadata that can be manually cleaned up or ignored. Ideal for removing items that may be + re-created or restored. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!) { delete_domain(name: $name) { ok msg } } - """ - ) + """) variables = {"name": name} data = await api_session.get().Admin._query(query, variables) return data["delete_domain"] @@ -202,17 +198,18 @@ async def delete(cls, name: str): @classmethod async def purge(cls, name: str): """ - Purges an existing domain. This action is irreversible and should be used when you need to ensure that no trace of the resource remains. + Purges an existing domain. + + This action is irreversible and should be used when you need to ensure that no trace of the + resource remains. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!) { purge_domain(name: $name) { ok msg } } - """ - ) + """) variables = {"name": name} data = await api_session.get().Admin._query(query, variables) return data["purge_domain"] diff --git a/src/ai/backend/client/func/group.py b/src/ai/backend/client/func/group.py index 9590ee948d..434460b9cf 100644 --- a/src/ai/backend/client/func/group.py +++ b/src/ai/backend/client/func/group.py @@ -1,4 +1,3 @@ -import textwrap from typing import Any, Iterable, Optional, Sequence from ai.backend.client.output.fields import group_fields @@ -7,6 +6,7 @@ from ...cli.types import Undefined, undefined from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function, resolve_fields __all__ = ("Group",) @@ -60,13 +60,11 @@ async def from_name( :param domain_name: Name of domain to get groups from. :param fields: Per-group query fields to fetch. 
""" - query = textwrap.dedent( - """\ + query = _d(""" query($name: String!, $domain_name: String) { groups_by_name(name: $name, domain_name: $domain_name) {$fields} } - """ - ) + """) resolved_fields = resolve_fields(fields, group_fields, _default_detail_fields) query = query.replace("$fields", " ".join(resolved_fields)) variables = { @@ -91,13 +89,11 @@ async def list( """ if fields is None: fields = _default_list_fields - query = textwrap.dedent( - """\ + query = _d(""" query($domain_name: String) { groups(domain_name: $domain_name) {$fields} } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"domain_name": domain_name} data = await api_session.get().Admin._query(query, variables) @@ -118,13 +114,11 @@ async def detail( """ if fields is None: fields = _default_detail_fields - query = textwrap.dedent( - """\ + query = _d(""" query($gid: UUID!) { group(id: $gid) {$fields} } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"gid": gid} data = await api_session.get().Admin._query(query, variables) @@ -148,15 +142,13 @@ async def create( Creates a new group with the given options. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!, $input: GroupInput!) { create_group(name: $name, props: $input) { ok msg group {$fields} } } - """ - ) + """) resolved_fields = resolve_fields( fields, group_fields, @@ -195,15 +187,13 @@ async def update( Update existing group. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($gid: UUID!, $input: ModifyGroupInput!) { modify_group(gid: $gid, props: $input) { ok msg } } - """ - ) + """) inputs: dict[str, Any] = {} set_if_set(inputs, "name", name) set_if_set(inputs, "description", description) @@ -224,15 +214,13 @@ async def delete(cls, gid: str): """ Inactivates the existing group. Does not actually delete it for safety. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($gid: UUID!) { delete_group(gid: $gid) { ok msg } } - """ - ) + """) variables = {"gid": gid} data = await api_session.get().Admin._query(query, variables) return data["delete_group"] @@ -243,15 +231,13 @@ async def purge(cls, gid: str): """ Delete the existing group. This action cannot be undone. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($gid: UUID!) { purge_group(gid: $gid) { ok msg } } - """ - ) + """) variables = {"gid": gid} data = await api_session.get().Admin._query(query, variables) return data["purge_group"] @@ -265,15 +251,13 @@ async def add_users( Add users to a group. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($gid: UUID!, $input: ModifyGroupInput!) { modify_group(gid: $gid, props: $input) { ok msg } } - """ - ) + """) variables = { "gid": gid, "input": { @@ -293,15 +277,13 @@ async def remove_users( Remove users from a group. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($gid: UUID!, $input: ModifyGroupInput!) 
{ modify_group(gid: $gid, props: $input) { ok msg } } - """ - ) + """) variables = { "gid": gid, "input": { diff --git a/src/ai/backend/client/func/image.py b/src/ai/backend/client/func/image.py index e045e6a3c4..96e3ad9040 100644 --- a/src/ai/backend/client/func/image.py +++ b/src/ai/backend/client/func/image.py @@ -40,9 +40,9 @@ async def list( """ q = _d(""" query($is_operation: Boolean) { - images(is_operation: $is_operation) { - $fields - } + images(is_operation: $is_operation) { + $fields + } } """) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) @@ -65,9 +65,9 @@ async def get( """ q = _d(""" query($reference: String!, $architecture: String!) { - image(reference: $reference, architecture: $architecture) { - $fields" - } + image(reference: $reference, architecture: $architecture) { + $fields" + } } """) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) @@ -90,9 +90,9 @@ async def get_by_id( """ q = _d(""" query($id: String!) { - image(id: $id) { - $fields - } + image(id: $id) { + $fields + } } """) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) @@ -113,9 +113,9 @@ async def list_customized( """ q = _d(""" query { - customized_images { - $fields - } + customized_images { + $fields + } } """) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) @@ -127,9 +127,9 @@ async def list_customized( async def rescan_images(cls, registry: str): q = _d(""" mutation($registry: String) { - rescan_images(registry:$registry) { - ok msg task_id - } + rescan_images(registry:$registry) { + ok msg task_id + } } """) variables = { @@ -143,9 +143,9 @@ async def rescan_images(cls, registry: str): async def forget_image_by_id(cls, image_id: str): q = _d(""" mutation($image_id: String!) { - forget_image_by_id(image_id: $image_id) { - ok msg - } + forget_image_by_id(image_id: $image_id) { + ok msg + } } """) variables = { @@ -159,9 +159,9 @@ async def forget_image_by_id(cls, image_id: str): async def untag_image_from_registry(cls, id: str): q = _d(""" mutation($id: String!) { - untag_image_from_registry(id: $id) { - ok msg - } + untag_image_from_registry(id: $id) { + ok msg + } } """) variables = { @@ -175,9 +175,9 @@ async def untag_image_from_registry(cls, id: str): async def forget_image(cls, reference: str, architecture: str): q = _d(""" mutation($reference: String!, $architecture: String!) { - forget_image(reference: $reference, architecture: $architecture) { - ok msg - } + forget_image(reference: $reference, architecture: $architecture) { + ok msg + } } """) variables = { @@ -197,9 +197,9 @@ async def alias_image( ) -> dict: q = _d(""" mutation($alias: String!, $target: String!) { - alias_image(alias: $alias, target: $target) { - ok msg - } + alias_image(alias: $alias, target: $target) { + ok msg + } } """) variables = { @@ -216,9 +216,9 @@ async def alias_image( async def dealias_image(cls, alias: str) -> dict: q = _d(""" mutation($alias: String!) 
{ - dealias_image(alias: $alias) { - ok msg - } + dealias_image(alias: $alias) { + ok msg + } } """) variables = { diff --git a/src/ai/backend/client/func/keypair.py b/src/ai/backend/client/func/keypair.py index 4ccaf574e5..8a643a50bd 100644 --- a/src/ai/backend/client/func/keypair.py +++ b/src/ai/backend/client/func/keypair.py @@ -6,6 +6,7 @@ from ..pagination import fetch_paginated_result from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function __all__ = ("KeyPair",) @@ -57,12 +58,15 @@ async def create( You need an admin privilege for this operation. """ uid_type = "Int!" if isinstance(user_id, int) else "String!" - q = ( - "mutation($user_id: {0}, $input: KeyPairInput!) {{".format(uid_type) - + " create_keypair(user_id: $user_id, props: $input) {" - " ok msg keypair { $fields }" - " }" - "}" + q = _d( + """ + mutation($user_id: %s, $input: KeyPairInput!) { + create_keypair(user_id: $user_id, props: $input) { + ok msg keypair { $fields } + } + } + """ + % uid_type ) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) inputs = { @@ -92,10 +96,13 @@ async def update( Creates a new keypair with the given options. You need an admin privilege for this operation. """ - q = ( - "mutation($access_key: String!, $input: ModifyKeyPairInput!) {" - + " modify_keypair(access_key: $access_key, props: $input) { ok msg }}" - ) + q = _d(""" + mutation($access_key: String!, $input: ModifyKeyPairInput!) { + modify_keypair(access_key: $access_key, props: $input) { + ok msg + } + } + """) inputs: dict[str, Any] = {} set_if_set(inputs, "is_active", is_active) set_if_set(inputs, "is_admin", is_admin) @@ -114,13 +121,13 @@ async def delete(cls, access_key: str): """ Deletes an existing keypair with given ACCESSKEY. """ - q = ( - "mutation($access_key: String!) {" - " delete_keypair(access_key: $access_key) {" - " ok msg" - " }" - "}" - ) + q = _d(""" + mutation($access_key: String!) { + delete_keypair(access_key: $access_key) { + ok msg + } + } + """) variables = { "access_key": access_key, } @@ -140,12 +147,24 @@ async def list( You need an admin privilege for this operation. """ if user_id is None: - q = "query($is_active: Boolean) { keypairs(is_active: $is_active) { $fields }}" + q = _d(""" + query($is_active: Boolean) { + keypairs(is_active: $is_active) { + $fields + } + } + """) else: uid_type = "Int!" if isinstance(user_id, int) else "String!" - q = ( - "query($email: {0}, $is_active: Boolean) {{".format(uid_type) - + " keypairs(email: $email, is_active: $is_active) { $fields }}" + q = _d( + """ + query($email: %s, $is_active: Boolean) { + keypairs(email: $email, is_active: $is_active) { + $fields + } + } + """ + % uid_type ) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) variables: Dict[str, Any] = { @@ -199,7 +218,7 @@ async def info(self, fields: Sequence[FieldSpec] = _default_detail_fields) -> di .. versionadded:: 18.12 """ - q = "query { keypair { $fields }}" + q = "query { keypair { $fields } }" q = q.replace("$fields", " ".join(f.field_ref for f in fields)) data = await api_session.get().Admin._query(q) return data["keypair"] @@ -211,10 +230,13 @@ async def activate(cls, access_key: str) -> dict: Activates this keypair. You need an admin privilege for this operation. """ - q = ( - "mutation($access_key: String!, $input: ModifyKeyPairInput!) 
{" - + " modify_keypair(access_key: $access_key, props: $input) { ok msg }}" - ) + q = _d(""" + mutation($access_key: String!, $input: ModifyKeyPairInput!) { + modify_keypair(access_key: $access_key, props: $input) { + ok msg + } + } + """) variables = { "access_key": access_key, "input": { @@ -236,10 +258,13 @@ async def deactivate(cls, access_key: str) -> dict: unless activated again by an administrator. You need an admin privilege for this operation. """ - q = ( - "mutation($access_key: String!, $input: ModifyKeyPairInput!) {" - + " modify_keypair(access_key: $access_key, props: $input) { ok msg }}" - ) + q = _d(""" + mutation($access_key: String!, $input: ModifyKeyPairInput!) { + modify_keypair(access_key: $access_key, props: $input) { + ok msg + } + } + """) variables = { "access_key": access_key, "input": { diff --git a/src/ai/backend/client/func/keypair_resource_policy.py b/src/ai/backend/client/func/keypair_resource_policy.py index bd62de79a2..eaff555355 100644 --- a/src/ai/backend/client/func/keypair_resource_policy.py +++ b/src/ai/backend/client/func/keypair_resource_policy.py @@ -5,6 +5,7 @@ from ..output.types import FieldSpec from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function, resolve_fields __all__ = ("KeypairResourcePolicy",) @@ -68,13 +69,13 @@ async def create( Creates a new keypair resource policy with the given options. You need an admin privilege for this operation. """ - q = ( - "mutation($name: String!, $input: CreateKeyPairResourcePolicyInput!) {" - + " create_keypair_resource_policy(name: $name, props: $input) {" - " ok msg resource_policy { $fields }" - " }" - "}" - ) + q = _d(""" + mutation($name: String!, $input: CreateKeyPairResourcePolicyInput!) { + create_keypair_resource_policy(name: $name, props: $input) { + ok msg resource_policy { $fields } + } + } + """) resolved_fields = resolve_fields( fields, keypair_resource_policy_fields, (keypair_resource_policy_fields["name"],) ) @@ -120,10 +121,13 @@ async def update( Updates an existing keypair resource policy with the given options. You need an admin privilege for this operation. """ - q = ( - "mutation($name: String!, $input: ModifyKeyPairResourcePolicyInput!) {" - + " modify_keypair_resource_policy(name: $name, props: $input) { ok msg }}" - ) + q = _d(""" + mutation($name: String!, $input: ModifyKeyPairResourcePolicyInput!) { + modify_keypair_resource_policy(name: $name, props: $input) { + ok msg + } + } + """) inputs: dict[str, Any] = {} set_if_set(inputs, "default_for_unspecified", default_for_unspecified) set_if_set(inputs, "total_resource_slots", total_resource_slots) @@ -149,10 +153,13 @@ async def delete(cls, name: str) -> dict: Deletes an existing keypair resource policy with given name. You need an admin privilege for this operation. """ - q = ( - "mutation($name: String!) {" - + " delete_keypair_resource_policy(name: $name) { ok msg }}" - ) + q = _d(""" + mutation($name: String!) { + delete_keypair_resource_policy(name: $name) { + ok msg + } + } + """) variables = { "name": name, } @@ -169,7 +176,7 @@ async def list( Lists the keypair resource policies. You need an admin privilege for this operation. """ - q = "query { keypair_resource_policies { $fields }}" + q = "query { keypair_resource_policies { $fields } }" q = q.replace("$fields", " ".join(f.field_ref for f in fields)) data = await api_session.get().Admin._query(q) return data["keypair_resource_policies"] @@ -187,7 +194,11 @@ async def info( .. 
versionadded:: 19.03 """ - q = "query($name: String) { keypair_resource_policy(name: $name) { $fields }}" + q = _d(""" + query($name: String) { + keypair_resource_policy(name: $name) { $fields } + } + """) q = q.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "name": name, diff --git a/src/ai/backend/client/func/network.py b/src/ai/backend/client/func/network.py index edc1adc039..c6dd0e3c23 100644 --- a/src/ai/backend/client/func/network.py +++ b/src/ai/backend/client/func/network.py @@ -5,6 +5,7 @@ from ..output.types import FieldSpec, RelayPaginatedResult from ..pagination import execute_paginated_relay_query from ..session import api_session +from ..utils import dedent as _d from .base import BaseFunction, api_function __all__ = ("Network",) @@ -59,15 +60,13 @@ async def create( :param driver: (Optional) The driver of the network. If not specified, the default driver will be used. :return: The created network. """ - q = ( - "mutation($name: String!, $project_id: UUID!, $driver: String) {" - " create_network(name: $name, project_id: $project_id, driver: $driver) {" - " network {" - " row_id" - " }" - " }" - "}" - ) + q = _d(""" + mutation($name: String!, $project_id: UUID!, $driver: String) { + create_network(name: $name, project_id: $project_id, driver: $driver) { + network { row_id } + } + } + """) data = await api_session.get().Admin._query( q, { @@ -93,7 +92,11 @@ async def get( """ Fetches the information of the network. """ - q = "query($id: String!) { network(id: $id) { $fields }}" + q = _d(""" + query($id: String!) { + network(id: $id) { $fields } + } + """) q = q.replace("$fields", " ".join(f.field_ref for f in (fields or _default_list_fields))) data = await api_session.get().Admin._query(q, {"id": str(self.network_id)}) return data["images"] @@ -103,13 +106,13 @@ async def update(self, name: str) -> None: """ Updates network. """ - q = ( - "mutation($network: String!, $props: UpdateNetworkInput!) {" - " modify_network(network: $network, props: $props) {" - " ok msg" - " }" - "}" - ) + q = _d(""" + mutation($network: String!, $props: UpdateNetworkInput!) { + modify_network(network: $network, props: $props) { + ok msg + } + } + """) variables = { "network": str(self.network_id), "props": {"name": name}, @@ -122,7 +125,13 @@ async def delete(self) -> None: """ Deletes network. Delete only works for networks that are not attached to active session. """ - q = "mutation($network: String!) { delete_network(network: $network) { ok msg }}" + q = _d(""" + mutation($network: String!) { + delete_network(network: $network) { + ok msg + } + } + """) variables = { "network": str(self.network_id), } diff --git a/src/ai/backend/client/func/quota_scope.py b/src/ai/backend/client/func/quota_scope.py index 03753d0a52..f4b8e3a168 100644 --- a/src/ai/backend/client/func/quota_scope.py +++ b/src/ai/backend/client/func/quota_scope.py @@ -1,4 +1,3 @@ -import textwrap from typing import Any, Sequence from ai.backend.common.types import QuotaConfig, QuotaScopeID @@ -7,6 +6,7 @@ from ..output.types import FieldSpec from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function _default_user_fields = ( @@ -40,13 +40,11 @@ async def get_user_info( email: str, fields: Sequence[FieldSpec] = _default_user_fields, ) -> dict[str, Any]: - query = textwrap.dedent( - """\ + query = _d(""" query($domain_name: String!, $email: String!) 
{ - user(domain_name: $domain_name, email: $email) {$fields} + user(domain_name: $domain_name, email: $email) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "domain_name": domain_name, @@ -63,13 +61,11 @@ async def get_project_info( name: str, fields: Sequence[FieldSpec] = _default_project_fields, ) -> dict[str, Any]: - query = textwrap.dedent( - """\ + query = _d(""" query($domain_name: String!, $name: String!) { - groups_by_name(domain_name: $domain_name, name: $name) {$fields} + groups_by_name(domain_name: $domain_name, name: $name) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "domain_name": domain_name, @@ -86,15 +82,13 @@ async def get_quota_scope( qsid: QuotaScopeID, fields: Sequence[FieldSpec] = _default_detail_fields, ) -> dict[str, Any]: - query = textwrap.dedent( - """\ + query = _d(""" query($storage_host_name: String!, $quota_scope_id: String!) { quota_scope(storage_host_name: $storage_host_name, quota_scope_id: $quota_scope_id) { - details {$fields} + details { $fields } } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "storage_host_name": host, @@ -112,15 +106,13 @@ async def set_quota_scope( config: QuotaConfig, fields: Sequence[FieldSpec] = _default_quota_scope_fields, ) -> dict[str, Any]: - query = textwrap.dedent( - """\ + query = _d(""" mutation($storage_host_name: String!, $quota_scope_id: String!, $input: QuotaScopeInput!) { set_quota_scope(storage_host_name: $storage_host_name, quota_scope_id: $quota_scope_id, props: $input) { - quota_scope {$fields} + quota_scope { $fields } } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) inputs: dict[str, Any] = {} set_if_set(inputs, "hard_limit_bytes", config.limit_bytes) @@ -140,15 +132,13 @@ async def unset_quota_scope( qsid: QuotaScopeID, fields: Sequence[FieldSpec] = _default_quota_scope_fields, ) -> dict[str, Any]: - query = textwrap.dedent( - """\ + query = _d(""" mutation($storage_host_name: String!, $quota_scope_id: String!) { unset_quota_scope(storage_host_name: $storage_host_name, quota_scope_id: $quota_scope_id) { - quota_scope {$fields} + quota_scope { $fields } } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "storage_host_name": host, diff --git a/src/ai/backend/client/func/scaling_group.py b/src/ai/backend/client/func/scaling_group.py index 743e09ad90..38c3d3316b 100644 --- a/src/ai/backend/client/func/scaling_group.py +++ b/src/ai/backend/client/func/scaling_group.py @@ -1,5 +1,4 @@ import json -import textwrap from typing import Any, Iterable, Mapping, Optional, Sequence from ...cli.types import Undefined, undefined @@ -8,6 +7,7 @@ from ..request import Request from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function, resolve_fields __all__ = ("ScalingGroup",) @@ -80,15 +80,13 @@ async def list( List available scaling groups for the current user, considering the user, the user's domain, and the designated user group. 
""" - query = textwrap.dedent( - """\ + query = _d(""" query($is_active: Boolean) { scaling_groups(is_active: $is_active) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"is_active": None} data = await api_session.get().Admin._query(query, variables) @@ -107,13 +105,11 @@ async def detail( :param name: Name of the scaling group. :param fields: Additional per-scaling-group query fields. """ - query = textwrap.dedent( - """\ + query = _d(""" query($name: String) { - scaling_group(name: $name) {$fields} + scaling_group(name: $name) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"name": name} data = await api_session.get().Admin._query(query, variables) @@ -140,15 +136,13 @@ async def create( """ Creates a new scaling group with the given options. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!, $input: CreateScalingGroupInput!) { create_scaling_group(name: $name, props: $input) { - ok msg scaling_group {$fields} + ok msg scaling_group { $fields } } } - """ - ) + """) resolved_fields = resolve_fields( fields, scaling_group_fields, (scaling_group_fields["name"],) ) @@ -193,15 +187,13 @@ async def update( """ Update existing scaling group. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!, $input: ModifyScalingGroupInput!) { modify_scaling_group(name: $name, props: $input) { ok msg } } - """ - ) + """) resolved_fields = resolve_fields( fields, scaling_group_fields, (scaling_group_fields["name"],) ) @@ -230,15 +222,13 @@ async def delete(cls, name: str): """ Deletes an existing scaling group. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($name: String!) { delete_scaling_group(name: $name) { ok msg } } - """ - ) + """) variables = {"name": name} data = await api_session.get().Admin._query(query, variables) return data["delete_scaling_group"] @@ -252,16 +242,14 @@ async def associate_domain(cls, scaling_group: str, domain: str): :param scaling_group: The name of a scaling group. :param domain: The name of a domain. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($scaling_group: String!, $domain: String!) { associate_scaling_group_with_domain( scaling_group: $scaling_group, domain: $domain) { ok msg } } - """ - ) + """) variables = {"scaling_group": scaling_group, "domain": domain} data = await api_session.get().Admin._query(query, variables) return data["associate_scaling_group_with_domain"] @@ -275,16 +263,14 @@ async def dissociate_domain(cls, scaling_group: str, domain: str): :param scaling_group: The name of a scaling group. :param domain: The name of a domain. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($scaling_group: String!, $domain: String!) { disassociate_scaling_group_with_domain( scaling_group: $scaling_group, domain: $domain) { ok msg } } - """ - ) + """) variables = {"scaling_group": scaling_group, "domain": domain} data = await api_session.get().Admin._query(query, variables) return data["disassociate_scaling_group_with_domain"] @@ -297,15 +283,13 @@ async def dissociate_all_domain(cls, domain: str): :param domain: The name of a domain. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($domain: String!) 
{ disassociate_all_scaling_groups_with_domain(domain: $domain) { ok msg } } - """ - ) + """) variables = {"domain": domain} data = await api_session.get().Admin._query(query, variables) return data["disassociate_all_scaling_groups_with_domain"] @@ -319,16 +303,14 @@ async def associate_group(cls, scaling_group: str, group_id: str): :param scaling_group: The name of a scaling group. :param group_id: The ID of a group. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($scaling_group: String!, $user_group: UUID!) { associate_scaling_group_with_user_group( scaling_group: $scaling_group, user_group: $user_group) { ok msg } } - """ - ) + """) variables = {"scaling_group": scaling_group, "user_group": group_id} data = await api_session.get().Admin._query(query, variables) return data["associate_scaling_group_with_user_group"] @@ -342,16 +324,14 @@ async def dissociate_group(cls, scaling_group: str, group_id: str): :param scaling_group: The name of a scaling group. :param group_id: The ID of a group. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($scaling_group: String!, $user_group: String!) { disassociate_scaling_group_with_user_group( scaling_group: $scaling_group, user_group: $user_group) { ok msg } } - """ - ) + """) variables = {"scaling_group": scaling_group, "user_group": group_id} data = await api_session.get().Admin._query(query, variables) return data["disassociate_scaling_group_with_user_group"] @@ -364,15 +344,13 @@ async def dissociate_all_group(cls, group_id: str): :param group_id: The ID of a group. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($group_id: UUID!) { disassociate_all_scaling_groups_with_group(user_group: $group_id) { ok msg } } - """ - ) + """) variables = {"group_id": group_id} data = await api_session.get().Admin._query(query, variables) return data["disassociate_all_scaling_groups_with_group"] diff --git a/src/ai/backend/client/func/service.py b/src/ai/backend/client/func/service.py index ce11f81169..8fb720f158 100644 --- a/src/ai/backend/client/func/service.py +++ b/src/ai/backend/client/func/service.py @@ -1,17 +1,17 @@ -import textwrap from typing import Any, Literal, Mapping, Optional, Sequence from uuid import UUID from faker import Faker -from ai.backend.client.exceptions import BackendClientError -from ai.backend.client.output.fields import service_fields -from ai.backend.client.output.types import FieldSpec, PaginatedResult -from ai.backend.client.pagination import fetch_paginated_result -from ai.backend.client.request import Request -from ai.backend.client.session import api_session from ai.backend.common.arch import DEFAULT_IMAGE_ARCH +from ..exceptions import BackendClientError +from ..output.fields import service_fields +from ..output.types import FieldSpec, PaginatedResult +from ..pagination import fetch_paginated_result +from ..request import Request +from ..session import api_session +from ..utils import dedent as _d from .base import BaseFunction, api_function __all__ = ("Service",) @@ -71,13 +71,11 @@ async def detail( service_id: str, fields: Sequence[FieldSpec] = _default_fields, ) -> Sequence[dict]: - query = textwrap.dedent( - """\ + query = _d(""" query($endpoint_id: UUID!) 
{ - endpoint(endpoint_id: $endpoint_id) {$fields} + endpoint(endpoint_id: $endpoint_id) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"endpoint_id": service_id} data = await api_session.get().Admin._query(query, variables) diff --git a/src/ai/backend/client/func/session.py b/src/ai/backend/client/func/session.py index 4a65107402..7e6a17fc97 100644 --- a/src/ai/backend/client/func/session.py +++ b/src/ai/backend/client/func/session.py @@ -5,7 +5,6 @@ import secrets import tarfile import tempfile -import textwrap from collections.abc import ( AsyncIterator, Iterable, @@ -47,6 +46,7 @@ from ..session import api_session from ..types import set_if_set from ..utils import ProgressReportingReader +from ..utils import dedent as _d from ..versioning import get_id_or_name, get_naming from .base import BaseFunction, api_function @@ -555,8 +555,7 @@ async def update( f"{self!r} must have a valid session ID to invoke the update() method." ) client_mutation_id = secrets.token_urlsafe(16) - query = textwrap.dedent( - """\ + query = _d(""" mutation($input: ModifyComputeSessionInput!) { modify_compute_session(input: $input) { item { @@ -566,8 +565,7 @@ async def update( clientMutationId } } - """ - ) + """) inputs: dict[str, Any] = { "id": str(self.id), "clientMutationId": client_mutation_id, diff --git a/src/ai/backend/client/func/storage.py b/src/ai/backend/client/func/storage.py index 1d5639df5a..7d49b7b81c 100644 --- a/src/ai/backend/client/func/storage.py +++ b/src/ai/backend/client/func/storage.py @@ -1,11 +1,10 @@ -import textwrap from typing import Optional, Sequence -from ai.backend.client.output.fields import storage_fields -from ai.backend.client.output.types import FieldSpec, PaginatedResult -from ai.backend.client.pagination import fetch_paginated_result -from ai.backend.client.session import api_session - +from ..output.fields import storage_fields +from ..output.types import FieldSpec, PaginatedResult +from ..pagination import fetch_paginated_result +from ..session import api_session +from ..utils import dedent as _d from .base import BaseFunction, api_function __all__ = ("Storage",) @@ -72,13 +71,11 @@ async def detail( vfolder_host: str, fields: Sequence[FieldSpec] = _default_detail_fields, ) -> dict: - query = textwrap.dedent( - """\ + query = _d(""" query($vfolder_host: String!) { - storage_volume(id: $vfolder_host) {$fields} + storage_volume(id: $vfolder_host) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"vfolder_host": vfolder_host} data = await api_session.get().Admin._query(query, variables) diff --git a/src/ai/backend/client/func/user.py b/src/ai/backend/client/func/user.py index ce401df26b..b5f4b79b1d 100644 --- a/src/ai/backend/client/func/user.py +++ b/src/ai/backend/client/func/user.py @@ -1,7 +1,6 @@ from __future__ import annotations import enum -import textwrap import uuid from typing import Any, Iterable, Mapping, Sequence, Union @@ -13,6 +12,7 @@ from ..request import Request from ..session import api_session from ..types import set_if_set +from ..utils import dedent as _d from .base import BaseFunction, api_function, resolve_fields __all__ = ( @@ -136,13 +136,11 @@ async def list( :param group: Fetch users in a specific group. :param fields: Additional per-user query fields to fetch. 
""" - query = textwrap.dedent( - """\ + query = _d(""" query($status: String, $group: UUID) { - users(status: $status, group_id: $group) {$fields} + users(status: $status, group_id: $group) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = { "status": status, @@ -200,21 +198,17 @@ async def detail( :param fields: Additional per-user query fields to fetch. """ if email is None: - query = textwrap.dedent( - """\ + query = _d(""" query { - user {$fields} + user { $fields } } - """ - ) + """) else: - query = textwrap.dedent( - """\ + query = _d(""" query($email: String) { - user(email: $email) {$fields} + user(email: $email) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"email": email} data = await api_session.get().Admin._query(query, variables if email is not None else None) @@ -235,21 +229,17 @@ async def detail_by_uuid( :param fields: Additional per-user query fields to fetch. """ if user_uuid is None: - query = textwrap.dedent( - """\ + query = _d(""" query { - user {$fields} + user { $fields } } - """ - ) + """) else: - query = textwrap.dedent( - """\ + query = _d(""" query($user_id: ID) { - user_from_uuid(user_id: $user_id) {$fields} + user_from_uuid(user_id: $user_id) { $fields } } - """ - ) + """) query = query.replace("$fields", " ".join(f.field_ref for f in fields)) variables = {"user_id": str(user_uuid)} data = await api_session.get().Admin._query( @@ -281,15 +271,13 @@ async def create( Creates a new user with the given options. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($email: String!, $input: UserInput!) { create_user(email: $email, props: $input) { - ok msg user {$fields} + ok msg user { $fields } } } - """ - ) + """) default_fields = ( user_fields["domain_name"], user_fields["email"], @@ -344,15 +332,13 @@ async def update( Update existing user. You need an admin privilege for this operation. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($email: String!, $input: ModifyUserInput!) { modify_user(email: $email, props: $input) { ok msg } } - """ - ) + """) inputs: dict[str, Any] = {} set_if_set(inputs, "password", password) set_if_set(inputs, "username", username) @@ -380,15 +366,13 @@ async def delete(cls, email: str): """ Inactivates an existing user. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($email: String!) { delete_user(email: $email) { ok msg } } - """ - ) + """) variables = {"email": email} data = await api_session.get().Admin._query(query, variables) return data["delete_user"] @@ -403,15 +387,13 @@ async def purge(cls, email: str, purge_shared_vfolders=False): Shared virtual folder's ownership will be transferred to the requested admin. To delete shared folders as well, set ``purge_shared_vfolders`` to ``True``. """ - query = textwrap.dedent( - """\ + query = _d(""" mutation($email: String!, $input: PurgeUserInput!) 
{ purge_user(email: $email, props: $input) { ok msg } } - """ - ) + """) variables = { "email": email, "input": { From 51dc0e61bbe40c074de06c0e717b09123c4d6592 Mon Sep 17 00:00:00 2001 From: Joongi Kim Date: Fri, 3 Jan 2025 00:10:54 +0900 Subject: [PATCH 9/9] style: fix indent --- src/ai/backend/client/func/keypair.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ai/backend/client/func/keypair.py b/src/ai/backend/client/func/keypair.py index 8a643a50bd..9fe2eb0ef8 100644 --- a/src/ai/backend/client/func/keypair.py +++ b/src/ai/backend/client/func/keypair.py @@ -61,9 +61,9 @@ async def create( q = _d( """ mutation($user_id: %s, $input: KeyPairInput!) { - create_keypair(user_id: $user_id, props: $input) { - ok msg keypair { $fields } - } + create_keypair(user_id: $user_id, props: $input) { + ok msg keypair { $fields } + } } """ % uid_type