From a5ff5f1f24b190d3ca5c1213fd321e52b191cca0 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 13 Jun 2023 10:25:36 -0400 Subject: [PATCH 01/19] feat: make azure and gcp async --- poetry.lock | 516 +++++++++++++++++- pyproject.toml | 2 + .../azure_connector/connector.py | 274 +++++++--- .../cloud_connectors/common/cli/__init__.py | 6 +- .../common/cli/commands/scan.py | 36 +- .../cloud_connectors/common/connector.py | 109 ++-- .../cloud_connectors/common/healthcheck.py | 4 +- .../cloud_connectors/common/settings.py | 9 +- .../gcp_connector/connector.py | 250 ++++++--- 9 files changed, 949 insertions(+), 257 deletions(-) diff --git a/poetry.lock b/poetry.lock index bce01dd..4952112 100644 --- a/poetry.lock +++ b/poetry.lock @@ -18,6 +18,202 @@ PyJWT = ">=1.0.0,<3" python-dateutil = ">=2.1.0,<3" requests = ">=2.0.0,<3" +[[package]] +name = "aioboto3" +version = "11.2.0" +description = "Async boto3 wrapper" +category = "dev" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aioboto3-11.2.0-py3-none-any.whl", hash = "sha256:df4b83c3943b009a4dcd9f397f9f0491a374511b1ef37545082a771ca1e549fb"}, + {file = "aioboto3-11.2.0.tar.gz", hash = "sha256:c7f6234fd73efcb60ab6fca383fec33bb6352ca1832f252eac810cd6674f1748"}, +] + +[package.dependencies] +aiobotocore = {version = "2.5.0", extras = ["boto3"]} + +[package.extras] +chalice = ["chalice (>=1.24.0)"] +s3cse = ["cryptography (>=2.3.1)"] + +[[package]] +name = "aiobotocore" +version = "2.5.0" +description = "Async client for aws services using botocore and aiohttp" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiobotocore-2.5.0-py3-none-any.whl", hash = "sha256:9a2a022d7b78ec9a2af0de589916d2721cddbf96264401b78d7a73c1a1435f3b"}, + {file = "aiobotocore-2.5.0.tar.gz", hash = "sha256:6a5b397cddd4f81026aa91a14c7dd2650727425740a5af8ba75127ff663faf67"}, +] + +[package.dependencies] +aiohttp = ">=3.3.1" +aioitertools = ">=0.5.1" +boto3 = {version = ">=1.26.76,<1.26.77", optional = true, markers = "extra == \"boto3\""} +botocore = ">=1.29.76,<1.29.77" +wrapt = ">=1.10.10" + +[package.extras] +awscli = ["awscli (>=1.27.76,<1.27.77)"] +boto3 = ["boto3 (>=1.26.76,<1.26.77)"] + +[[package]] +name = "aiohttp" +version = "3.8.4" +description = "Async http client/server framework (asyncio)" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, + {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, + {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, + {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, + {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, + {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, + {file = 
"aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, + {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, + {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, + {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, + {file 
= "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, + {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, + {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, + {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aioitertools" +version = "0.11.0" +description = "itertools and builtins for AsyncIO and mixed iterables" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"}, + {file = "aioitertools-0.11.0.tar.gz", hash = "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831"}, +] + +[package.dependencies] +typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} + +[[package]] +name = "aiometer" +version = "0.4.0" +description = "A Python concurrency scheduling library, compatible with asyncio and trio" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiometer-0.4.0-py3-none-any.whl", hash = "sha256:020cc80716565deeb091eb1c0c78ee9693ed5177f193f56ec253de9dde794b23"}, + {file = "aiometer-0.4.0.tar.gz", hash = "sha256:dfdd4c082d1dd03271216574ddaa9b0cd17bb0004f94992cc4c85fcbc1abfab5"}, +] + +[package.dependencies] +anyio = ">=3.2,<4.0" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "alabaster" version = "0.7.13" @@ -41,6 
+237,28 @@ files = [ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, ] +[[package]] +name = "anyio" +version = "3.7.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, + {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, +] + +[package.dependencies] +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] + [[package]] name = "applicationinsights" version = "0.11.10" @@ -111,6 +329,18 @@ six = "*" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -1966,18 +2196,18 @@ black = ">=22.1.0" [[package]] name = "boto3" -version = "1.26.130" +version = "1.26.76" description = "The AWS SDK for Python" category = "dev" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.26.130-py3-none-any.whl", hash = "sha256:d6f9c6ebf417260ea5fa7a227e7bce9451f1f5010be05ac4075596356f584455"}, - {file = "boto3-1.26.130.tar.gz", hash = "sha256:3ae2b34921bb08a1d7ce52db9ec1a25159fca779648e596ede37e1049ed77de8"}, + {file = "boto3-1.26.76-py3-none-any.whl", hash = "sha256:b4c2969b7677762914394b8273cc1905dfe5b71f250741c1a575487ae357e729"}, + {file = "boto3-1.26.76.tar.gz", hash = "sha256:30c7d967ed1c6b5a05643e42cae9d4d36c3f1cb6782637ddc7007a104cfd9027"}, ] [package.dependencies] -botocore = ">=1.29.130,<1.30.0" +botocore = ">=1.29.76,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -2362,14 +2592,14 @@ xray = ["mypy-boto3-xray (>=1.26.0,<1.27.0)"] [[package]] name = "botocore" -version = "1.29.130" +version = "1.29.76" description = "Low-level, data-driven core of boto 3." 
category = "dev" optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.29.130-py3-none-any.whl", hash = "sha256:56d1f54c3f8e140f965e5300d1cc5b565cb758134d9213fb05e91e1bb160330e"}, - {file = "botocore-1.29.130.tar.gz", hash = "sha256:3a31293b84ecfe5f5f2c4b7dc85a77d7b890b468a376b593fde15cacc76862dd"}, + {file = "botocore-1.29.76-py3-none-any.whl", hash = "sha256:70735b00cd529f152992231ca6757e458e5ec25db43767b3526e9a35b2f143b7"}, + {file = "botocore-1.29.76.tar.gz", hash = "sha256:c2f67b6b3f8acf2968eafca06526f07b9fb0d27bac4c68a635d51abb675134a7"}, ] [package.dependencies] @@ -2879,7 +3109,7 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3106,6 +3336,90 @@ files = [ astor = ">=0.1" flake8 = ">=3.7" +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = 
"frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + [[package]] name = "furo" version = "2022.12.7" @@ -3858,6 +4172,90 @@ adal = ">=0.6.0,<2.0.0" msrest = ">=0.6.0,<2.0.0" six = "*" +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = 
"multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = 
"sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "mypy" version 
= "0.942" @@ -5113,6 +5511,18 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -5771,6 +6181,94 @@ files = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = 
"yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = 
"yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + 
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + [[package]] name = "zipp" version = "3.15.0" @@ -5790,4 +6288,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7cc4ce5110c5a5eca8acda5f34ed21ee9dcef052a70bbf62f0bbe4e0d8ec36c1" +content-hash = "f0ab89fbd5465bbb4a546cd0edcfcabd96cc11df42526ad333353cb0c765cd35" diff --git a/pyproject.toml b/pyproject.toml index 11211e5..82000db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ censys-cc = "censys.cloud_connectors.common.cli:main" [tool.poetry.dependencies] python = "^3.9" +aiometer = "^0.4.0" backoff = "^2.2.1" censys = "^2.2.2" inquirerpy = "^0.3.3" @@ -98,6 +99,7 @@ sphinxcontrib-asciinema = "^0.3.6" [tool.poetry.group.aws.dependencies] boto3 = "^1.24.63" boto3-stubs = {extras = ["apigateway", "apigatewayv2", "ec2", "ecs", "elb", "elbv2", "rds", "route53", "route53domains", "s3", "sts"], version = "^1.24.63"} +aioboto3 = "^11.2.0" [tool.poetry.group.azure.dependencies] azure-cli = "^2.48.1" diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index 5a13465..ab252ce 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -1,21 +1,20 @@ """Azure Cloud Connector.""" -from collections.abc import Generator -from typing import Optional +from typing import Any, AsyncGenerator, Optional from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ServiceRequestError, ) -from azure.identity import ClientSecretCredential -from azure.mgmt.containerinstance import ContainerInstanceManagementClient -from azure.mgmt.dns import DnsManagementClient -from azure.mgmt.network import NetworkManagementClient -from azure.mgmt.sql import SqlManagementClient -from azure.mgmt.storage import StorageManagementClient +from azure.identity.aio import ClientSecretCredential +from azure.mgmt.containerinstance.aio import 
ContainerInstanceManagementClient +from azure.mgmt.dns.aio import DnsManagementClient +from azure.mgmt.network.aio import NetworkManagementClient +from azure.mgmt.sql.aio import SqlManagementClient +from azure.mgmt.storage.aio import StorageManagementClient from azure.mgmt.storage.models import StorageAccount -from azure.storage.blob import BlobServiceClient, ContainerProperties -from msrest.serialization import Model as AzureModel +from azure.storage.blob import ContainerProperties +from azure.storage.blob.aio import BlobServiceClient, ContainerClient from censys.cloud_connectors.common.cloud_asset import AzureContainerAsset from censys.cloud_connectors.common.connector import CloudConnector @@ -33,9 +32,6 @@ class AzureCloudConnector(CloudConnector): """Azure Cloud Connector.""" provider = ProviderEnum.AZURE - subscription_id: str - credentials: ClientSecretCredential - provider_settings: AzureSpecificSettings def __init__(self, settings: Settings): """Initialize Azure Cloud Connector. @@ -54,47 +50,68 @@ def __init__(self, settings: Settings): AzureResourceTypes.STORAGE_ACCOUNTS: self.get_storage_containers, } - def scan(self): - """Scan Azure Subscription.""" + async def scan( + self, + provider_settings: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + ): + """Scan Azure Subscription. + + Args: + provider_settings (AzureSpecificSettings): Azure provider settings. + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + """ with Healthcheck( self.settings, - self.provider_settings, - provider={"subscription_id": self.subscription_id}, + provider_settings, + provider={"subscription_id": subscription_id}, exception_map={ ClientAuthenticationError: "PERMISSIONS", }, ): - super().scan() + await super().scan( + provider_settings, + credentials=credentials, + subscription_id=subscription_id, + ) - def scan_all(self): + async def scan_all(self): """Scan all Azure Subscriptions.""" provider_settings: dict[ tuple, AzureSpecificSettings - ] = self.settings.providers.get(self.provider, {}) + ] = self.settings.providers.get( + self.provider, {} + ) # type: ignore for provider_setting in provider_settings.values(): - self.provider_settings = provider_setting - self.credentials = ClientSecretCredential( + credentials = ClientSecretCredential( tenant_id=provider_setting.tenant_id, client_id=provider_setting.client_id, client_secret=provider_setting.client_secret, ) - for subscription_id in self.provider_settings.subscription_id: + for subscription_id in provider_setting.subscription_id: self.logger.info(f"Scanning Azure Subscription {subscription_id}") - self.subscription_id = subscription_id try: - self.scan() + await self.scan(provider_setting, credentials, subscription_id) except Exception as e: self.logger.error( f"Unable to scan Azure Subscription {subscription_id}. Error: {e}" ) self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) - self.subscription_id = None - def format_label(self, asset: AzureModel) -> str: + await credentials.close() + + def format_label( + self, + asset: Any, + subscription_id: str, + ) -> str: """Format Azure asset label. Args: - asset (AzureModel): Azure asset. + asset (Any): Azure asset. + subscription_id (str): Azure subscription ID. Returns: str: Formatted label. 
@@ -105,24 +122,53 @@ def format_label(self, asset: AzureModel) -> str: asset_location: Optional[str] = getattr(asset, "location", None) if not asset_location: raise ValueError("Asset has no location.") - return f"{self.label_prefix}{self.subscription_id}/{asset_location}" + return f"{self.label_prefix}{subscription_id}/{asset_location}" + + async def get_ip_addresses( + self, + _: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + current_service: AzureResourceTypes, + ): + """Get Azure IP addresses. - def get_ip_addresses(self): - """Get Azure IP addresses.""" - network_client = NetworkManagementClient(self.credentials, self.subscription_id) - for asset in network_client.public_ip_addresses.list_all(): + Args: + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + current_service (AzureResourceTypes): Azure resource type. + """ + network_client = NetworkManagementClient(credentials, subscription_id) # type: ignore + async for asset in network_client.public_ip_addresses.list_all(): asset_dict = asset.as_dict() if ip_address := asset_dict.get("ip_address"): with SuppressValidationError(): - ip_seed = IpSeed(value=ip_address, label=self.format_label(asset)) - self.add_seed(ip_seed) + ip_seed = IpSeed( + value=ip_address, + label=self.format_label(asset, subscription_id), + ) + self.add_seed(ip_seed, service=current_service) + + await network_client.close() - def get_clusters(self): - """Get Azure clusters.""" + async def get_clusters( + self, + _: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + current_service: AzureResourceTypes, + ): + """Get Azure clusters. + + Args: + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + current_service (AzureResourceTypes): Azure resource type. + """ container_client = ContainerInstanceManagementClient( - self.credentials, self.subscription_id + credentials, subscription_id # type: ignore ) - for asset in container_client.container_groups.list(): + async for asset in container_client.container_groups.list(): asset_dict = asset.as_dict() if ( (ip_address_dict := asset_dict.get("ip_address")) @@ -130,65 +176,99 @@ def get_clusters(self): and (ip_address := ip_address_dict.get("ip")) ): with SuppressValidationError(): - ip_seed = IpSeed(value=ip_address, label=self.format_label(asset)) - self.add_seed(ip_seed) + ip_seed = IpSeed( + value=ip_address, + label=self.format_label(asset, subscription_id), + ) + self.add_seed(ip_seed, service=current_service) if domain := ip_address_dict.get("fqdn"): with SuppressValidationError(): domain_seed = DomainSeed( - value=domain, label=self.format_label(asset) + value=domain, + label=self.format_label(asset, subscription_id), ) - self.add_seed(domain_seed) + self.add_seed(domain_seed, service=current_service) - def get_sql_servers(self): - """Get Azure SQL servers.""" - sql_client = SqlManagementClient(self.credentials, self.subscription_id) + await container_client.close() - for asset in sql_client.servers.list(): + async def get_sql_servers( + self, + _: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + current_service: AzureResourceTypes, + ): + """Get Azure SQL servers. + + Args: + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + current_service (AzureResourceTypes): Azure resource type. 
+ """ + sql_client = SqlManagementClient(credentials, subscription_id) # type: ignore + async for asset in sql_client.servers.list(): asset_dict = asset.as_dict() if ( domain := asset_dict.get("fully_qualified_domain_name") ) and asset_dict.get("public_network_access") == "Enabled": with SuppressValidationError(): domain_seed = DomainSeed( - value=domain, label=self.format_label(asset) + value=domain, label=self.format_label(asset, subscription_id) ) - self.add_seed(domain_seed) + self.add_seed(domain_seed, service=current_service) + + await sql_client.close() - def get_dns_records(self): - """Get Azure DNS records.""" - dns_client = DnsManagementClient(self.credentials, self.subscription_id) + async def get_dns_records( + self, + _: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + current_service: AzureResourceTypes, + ): + """Get Azure DNS records. + + Args: + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + current_service (AzureResourceTypes): Azure resource type. + """ + dns_client = DnsManagementClient(credentials, subscription_id) # type: ignore try: - zones = list(dns_client.zones.list()) + # zones = list(dns_client.zones.list()) + zones = dns_client.zones.list() except HttpResponseError as error: # TODO: Better error handling here self.logger.error( f"Failed to get Azure DNS records: {error.message}", exc_info=True ) + await dns_client.close() return - for zone in zones: + async for zone in zones: zone_dict = zone.as_dict() # TODO: Do we need to check if zone is public? (ie. do we care?) if zone_dict.get("zone_type") != "Public": # pragma: no cover continue zone_resource_group = zone_dict.get("id").split("/")[4] - for asset in dns_client.record_sets.list_all_by_dns_zone( + async for asset in dns_client.record_sets.list_all_by_dns_zone( # type: ignore zone_resource_group, zone_dict.get("name") ): asset_dict = asset.as_dict() if domain_name := asset_dict.get("fqdn"): with SuppressValidationError(): domain_seed = DomainSeed( - value=domain_name, label=self.format_label(zone) + value=domain_name, + label=self.format_label(zone, subscription_id), ) - self.add_seed(domain_seed) + self.add_seed(domain_seed, service=current_service) if cname := asset_dict.get("cname_record", {}).get("cname"): with SuppressValidationError(): domain_seed = DomainSeed( - value=cname, label=self.format_label(zone) + value=cname, label=self.format_label(zone, subscription_id) ) - self.add_seed(domain_seed) + self.add_seed(domain_seed, service=current_service) for a_record in asset_dict.get("a_records", []): ip_address = a_record.get("ipv4_address") if not ip_address: @@ -196,65 +276,97 @@ def get_dns_records(self): with SuppressValidationError(): ip_seed = IpSeed( - value=ip_address, label=self.format_label(zone) + value=ip_address, + label=self.format_label(zone, subscription_id), ) - self.add_seed(ip_seed) + self.add_seed(ip_seed, service=current_service) + + await dns_client.close() - def _list_containers( - self, bucket_client: BlobServiceClient, account: StorageAccount - ) -> Generator[ContainerProperties, None, None]: + async def _list_containers( + self, blob_service_client: BlobServiceClient, account: StorageAccount + ) -> AsyncGenerator[ContainerProperties, None]: """List Azure containers. Args: - bucket_client (BlobServiceClient): Blob service client. + blob_service_client (BlobServiceClient): Blob service client. account (StorageAccount): Storage account. 
Yields: ContainerProperties: Azure container properties. """ try: - yield from bucket_client.list_containers() + async for container in blob_service_client.list_containers(): + yield container except HttpResponseError as error: self.logger.error( f"Failed to get Azure containers for {account.name}: {error.message}" ) + await blob_service_client.close() return - def get_storage_containers(self): - """Get Azure containers.""" - storage_client = StorageManagementClient(self.credentials, self.subscription_id) + async def get_storage_containers( + self, + _: AzureSpecificSettings, + credentials: ClientSecretCredential, + subscription_id: str, + current_service: AzureResourceTypes, + ): + """Get Azure containers. - for account in storage_client.storage_accounts.list(): - bucket_client = BlobServiceClient( - f"https://{account.name}.blob.core.windows.net/", self.credentials - ) + Args: + credentials (ClientSecretCredential): Azure credentials. + subscription_id (str): Azure subscription ID. + current_service (AzureResourceTypes): Azure resource type. + """ + storage_client = StorageManagementClient(credentials, subscription_id) # type: ignore + + async for account in storage_client.storage_accounts.list(): account_dict = account.as_dict() if (custom_domain := account_dict.get("custom_domain")) and ( domain := custom_domain.get("name") ): with SuppressValidationError(): domain_seed = DomainSeed( - value=domain, label=self.format_label(account) + value=domain, label=self.format_label(account, subscription_id) ) - self.add_seed(domain_seed) - uid = f"{self.subscription_id}/{self.credentials._tenant_id}/{account.name}" + self.add_seed(domain_seed, service=current_service) + uid = f"{subscription_id}/{credentials._client._tenant_id}/{account.name}" - for container in self._list_containers(bucket_client, account): + account_url = f"https://{account.name}.blob.core.windows.net/" + if ( + account.primary_endpoints is not None + and account.primary_endpoints.blob is not None + ): + account_url = account.primary_endpoints.blob + blob_service_client = BlobServiceClient(account_url, credentials) # type: ignore + async for container in self._list_containers(blob_service_client, account): # type: ignore + container_client: Optional[ContainerClient] = None try: - container_client = bucket_client.get_container_client(container) + container_client = blob_service_client.get_container_client( + container + ) container_url = container_client.url with SuppressValidationError(): - container_asset = AzureContainerAsset( + container_asset = AzureContainerAsset( # type: ignore value=container_url, uid=uid, scan_data={ - "accountNumber": self.subscription_id, + "accountNumber": subscription_id, "publicAccess": container.public_access, "location": account.location, }, ) - self.add_cloud_asset(container_asset) + self.add_cloud_asset(container_asset, service=current_service) + await container_client.close() except ServiceRequestError as error: # pragma: no cover self.logger.error( f"Failed to get Azure container {container} for {account.name}: {error.message}" ) + finally: + if container_client: + await container_client.close() + + await blob_service_client.close() + + await storage_client.close() diff --git a/src/censys/cloud_connectors/common/cli/__init__.py b/src/censys/cloud_connectors/common/cli/__init__.py index e7bb520..af2448b 100644 --- a/src/censys/cloud_connectors/common/cli/__init__.py +++ b/src/censys/cloud_connectors/common/cli/__init__.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 """Interact with the Censys 
Search API through the command line.""" +import asyncio import sys from typing import Optional @@ -28,7 +29,10 @@ def main(manual_args: Optional[list[str]] = None): sys.exit(0) try: - args.func(args) + if asyncio.iscoroutinefunction(args.func): + asyncio.run(args.func(args)) + else: + args.func(args) except KeyboardInterrupt: # pragma: no cover sys.exit(1) diff --git a/src/censys/cloud_connectors/common/cli/commands/scan.py b/src/censys/cloud_connectors/common/cli/commands/scan.py index 1dc4be9..6ebf1dd 100644 --- a/src/censys/cloud_connectors/common/cli/commands/scan.py +++ b/src/censys/cloud_connectors/common/cli/commands/scan.py @@ -1,7 +1,5 @@ """Censys Cloud Connectors scan command.""" import argparse -import sched -import time from datetime import datetime from typing import Optional @@ -14,17 +12,17 @@ from censys.cloud_connectors.common.settings import Settings -def cli_scan(args: argparse.Namespace): +async def cli_scan(args: argparse.Namespace): """Scan with Censys Cloud Connectors. Args: args (argparse.Namespace): Namespace. """ - scheduler = sched.scheduler(time.time, time.sleep) logger = get_logger(log_name="censys_cloud_connectors", level="INFO") logger.info("Censys Cloud Connectors Version: %s", __version__) + logger.info("Async scan started") try: settings = Settings(_env_file=".env") # type: ignore @@ -42,18 +40,7 @@ def cli_scan(args: argparse.Namespace): logger.error(e) return - settings.scan_all() - - while args.scan_interval: - scheduler.enter(args.scan_interval * 3600.0, 1, settings.scan_all) - logger.info( - f"Finished scanning at time: {datetime.now().isoformat(' ', 'seconds')}. Sleeping for {args.scan_interval} hour(s)." - ) - try: - scheduler.run() - except KeyboardInterrupt: # pragma: no cover - logger.info("Exiting...") - return + await settings.scan_all() logger.info( f"Finished scanning at time: {datetime.now().isoformat(' ', 'seconds')}." @@ -85,21 +72,4 @@ def include_cli(parent_parser: argparse._SubParsersAction): default=None, ) - def interval_type(val) -> float: - val = float(val) - if val < 1: - raise argparse.ArgumentTypeError( - "Scan interval must be greater than or equal to 1 hour." 
- ) - return val - - config_parser.add_argument( - "-d", - "--daemon", - help="run on a scheduled interval (must be greater than or equal to 1 hour)", - dest="scan_interval", - nargs="?", - type=interval_type, - const=1, - ) config_parser.set_defaults(func=cli_scan) diff --git a/src/censys/cloud_connectors/common/connector.py b/src/censys/cloud_connectors/common/connector.py index 7ecd934..47a9fd4 100644 --- a/src/censys/cloud_connectors/common/connector.py +++ b/src/censys/cloud_connectors/common/connector.py @@ -2,9 +2,11 @@ from abc import ABC, abstractmethod from collections import defaultdict from enum import Enum +from functools import partial from logging import Logger -from typing import Callable, Optional, Union +from typing import Any, Callable, Coroutine, Optional, Union +import aiometer from requests.exceptions import JSONDecodeError from censys.asm import Seeds @@ -29,9 +31,9 @@ class CloudConnector(ABC): seeds_api: Seeds seeds: dict[str, set[Seed]] cloud_assets: dict[str, set[CloudAsset]] - seed_scanners: dict[str, Callable[[], None]] - cloud_asset_scanners: dict[str, Callable[[], None]] - current_service: Optional[Union[str, Enum]] + seed_scanners: dict[str, Callable[..., Coroutine[Any, Any, Any]]] + cloud_asset_scanners: dict[str, Callable[..., Coroutine[Any, Any, Any]]] + # current_service: Optional[Union[str, Enum]] def __init__(self, settings: Settings): """Initialize the Cloud Connector. @@ -63,35 +65,43 @@ def __init__(self, settings: Settings): self.seeds = defaultdict(set) self.cloud_assets = defaultdict(set) - self.current_service = None - - def get_seeds(self) -> None: - """Gather seeds.""" - for seed_type, seed_scanner in self.seed_scanners.items(): - self.current_service = seed_type - if ( - self.provider_settings.ignore - and seed_type in self.provider_settings.ignore - ): - self.logger.debug(f"Skipping {seed_type}") - continue - self.logger.debug(f"Scanning {seed_type}") - seed_scanner() - self.current_service = None - - def get_cloud_assets(self) -> None: - """Gather cloud assets.""" - for cloud_asset_type, cloud_asset_scanner in self.cloud_asset_scanners.items(): - self.current_service = cloud_asset_type - if ( - self.provider_settings.ignore - and cloud_asset_type in self.provider_settings.ignore - ): - self.logger.debug(f"Skipping {cloud_asset_type}") - continue - self.logger.debug(f"Scanning {cloud_asset_type}") - cloud_asset_scanner() - self.current_service = None + + async def get_seeds(self, provider_settings, **kwargs) -> None: + """Gather seeds. + + Args: + provider_settings (ProviderSpecificSettings): The provider settings. + **kwargs: Any additional keyword arguments. + """ + await aiometer.run_all( + [ # type: ignore + partial( + seed_scanner, provider_settings, current_service=seed_type, **kwargs + ) + for seed_type, seed_scanner in self.seed_scanners.items() + ], + max_at_once=self.settings.max_concurrent_scans, + ) + + async def get_cloud_assets(self, provider_settings, **kwargs) -> None: + """Gather cloud assets. + + Args: + provider_settings (ProviderSpecificSettings): The provider settings. + **kwargs: Any additional keyword arguments. 
+ """ + await aiometer.run_all( + [ # type: ignore + partial( + cloud_asset_scanner, + provider_settings, + current_service=cloud_asset_type, + **kwargs, + ) + for cloud_asset_type, cloud_asset_scanner in self.cloud_asset_scanners.items() + ], + max_at_once=self.settings.max_concurrent_scans, + ) def get_event_context( self, @@ -111,7 +121,7 @@ def get_event_context( "event_type": event_type, "connector": self, "provider": self.provider, - "service": service or self.current_service, + "service": service, } def dispatch_event( @@ -158,7 +168,7 @@ def add_cloud_asset(self, cloud_asset: CloudAsset, **kwargs): EventTypeEnum.CLOUD_ASSET_FOUND, cloud_asset=cloud_asset, **kwargs ) - def submit_seeds(self): + async def submit_seeds(self): """Submit the seeds to the Censys ASM.""" submitted_seeds = 0 for label, seeds in self.seeds.items(): @@ -172,7 +182,7 @@ def submit_seeds(self): self.logger.info(f"Submitted {submitted_seeds} seeds.") self.dispatch_event(EventTypeEnum.SEEDS_SUBMITTED, count=submitted_seeds) - def submit_cloud_assets(self): + async def submit_cloud_assets(self): """Submit the cloud assets to the Censys ASM.""" submitted_assets = 0 for uid, cloud_assets in self.cloud_assets.items(): @@ -181,7 +191,7 @@ def submit_cloud_assets(self): "cloudConnectorUid": uid, "cloudAssets": [asset.to_dict() for asset in cloud_assets], } - self._add_cloud_assets(data) + await self._add_cloud_assets(data) submitted_assets += len(cloud_assets) except (CensysAsmException, JSONDecodeError) as e: self.logger.error(f"Error submitting cloud assets for {uid}: {e}") @@ -190,7 +200,7 @@ def submit_cloud_assets(self): EventTypeEnum.CLOUD_ASSETS_SUBMITTED, count=submitted_assets ) - def _add_cloud_assets(self, data: dict) -> dict: + async def _add_cloud_assets(self, data: dict) -> dict: """Add cloud assets to the Censys ASM. Args: @@ -218,26 +228,31 @@ def clear(self): self.seeds.clear() self.cloud_assets.clear() - def submit(self): # pragma: no cover + async def submit(self): # pragma: no cover """Submit the seeds and cloud assets to the Censys ASM.""" if self.settings.dry_run: self.logger.info("Dry run enabled. Skipping submission.") else: self.logger.info("Submitting seeds and assets...") - self.submit_seeds() - self.submit_cloud_assets() + await self.submit_seeds() + await self.submit_cloud_assets() self.clear() - def scan(self): - """Scan the seeds and cloud assets.""" + async def scan(self, provider_settings, **kwargs): + """Scan the seeds and cloud assets. + + Args: + provider_settings (ProviderSpecificSettings): The provider settings. + **kwargs: Any additional keyword arguments. 
+ """ self.logger.info("Gathering seeds and assets...") self.dispatch_event(EventTypeEnum.SCAN_STARTED) - self.get_seeds() - self.get_cloud_assets() - self.submit() + await self.get_seeds(provider_settings, **kwargs) + await self.get_cloud_assets(provider_settings, **kwargs) + await self.submit() self.dispatch_event(EventTypeEnum.SCAN_FINISHED) @abstractmethod - def scan_all(self): + async def scan_all(self): """Scan all the seeds and cloud assets.""" pass diff --git a/src/censys/cloud_connectors/common/healthcheck.py b/src/censys/cloud_connectors/common/healthcheck.py index 6b2a7d8..5a66079 100644 --- a/src/censys/cloud_connectors/common/healthcheck.py +++ b/src/censys/cloud_connectors/common/healthcheck.py @@ -1,7 +1,7 @@ """Class for performing healthchecks on Cloud Connectors.""" import traceback from types import TracebackType -from typing import Any, Literal, Optional +from typing import Any, Literal, Optional, Union import requests @@ -21,7 +21,7 @@ def __init__( settings: Settings, provider_specific_settings: ProviderSpecificSettings, provider: Optional[dict] = None, - exception_map: Optional[dict[Exception, ErrorCodes]] = None, + exception_map: Optional[dict[Union[Exception, Any], ErrorCodes]] = None, **kwargs, ) -> None: """Initialize the Healthcheck. diff --git a/src/censys/cloud_connectors/common/settings.py b/src/censys/cloud_connectors/common/settings.py index fa4e367..7b07c30 100644 --- a/src/censys/cloud_connectors/common/settings.py +++ b/src/censys/cloud_connectors/common/settings.py @@ -182,6 +182,11 @@ class Settings(BaseSettings): env="HEALTHCHECK_ENABLED", description="Enable healthcheck", ) + max_concurrent_scans: int = Field( + default=10, + env="MAX_CONCURRENT_SCANS", + description="Maximum number of concurrent scans", + ) # Verification timeout validation_timeout: int = Field( @@ -268,7 +273,7 @@ def write_providers_config_file(self): with open(self.providers_config_file, "w") as f: yaml.safe_dump(all_providers, f, default_flow_style=False, sort_keys=False) - def scan_all(self): + async def scan_all(self): """Scan all providers. Raises: @@ -284,4 +289,4 @@ def scan_all(self): f"Connector module not found for provider: {provider}" ) from e connector: "CloudConnector" = connector_cls(self) - connector.scan_all() + await connector.scan_all() diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index 0662076..357ec52 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -6,7 +6,7 @@ from google.api_core import exceptions from google.cloud import securitycenter_v1 from google.cloud.securitycenter_v1.services.security_center.pagers import ( - ListAssetsPager, + ListAssetsAsyncPager, ) from google.cloud.securitycenter_v1.types import ListAssetsResponse from google.oauth2 import service_account @@ -27,10 +27,10 @@ class GcpCloudConnector(CloudConnector): """Gcp Cloud Connector.""" provider = ProviderEnum.GCP - organization_id: int - credentials: service_account.Credentials - provider_settings: GcpSpecificSettings - security_center_client: securitycenter_v1.SecurityCenterClient + # organization_id: int + # credentials: service_account.Credentials + # provider_settings: GcpSpecificSettings + # security_center_client: securitycenter_v1.SecurityCenterAsyncClient def __init__(self, settings: Settings): """Initialize Gcp Cloud Connector. 
@@ -50,18 +50,21 @@ def __init__(self, settings: Settings): GcpSecurityCenterResourceTypes.STORAGE_BUCKET: self.get_storage_buckets, } - def scan(self): + async def scan(self, provider_settings: GcpSpecificSettings): """Scan Gcp. Scans Gcp for assets and seeds. + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + Raises: ValueError: If the service account credentials file is invalid. """ try: with Healthcheck( self.settings, - self.provider_settings, + provider_settings, exception_map={ exceptions.Unauthenticated: "PERMISSIONS", exceptions.PermissionDenied: "PERMISSIONS", @@ -69,13 +72,11 @@ def scan(self): ): key_file_path = ( Path(self.settings.secrets_dir) - / self.provider_settings.service_account_json_file + / provider_settings.service_account_json_file ) try: - self.credentials = ( - service_account.Credentials.from_service_account_file( - str(key_file_path) - ) + credentials = service_account.Credentials.from_service_account_file( + str(key_file_path) ) except ValueError as e: self.logger.error( @@ -83,31 +84,39 @@ def scan(self): f" {key_file_path}: {e}" ) raise - self.security_center_client = securitycenter_v1.SecurityCenterClient( - credentials=self.credentials + + security_center_client = securitycenter_v1.SecurityCenterAsyncClient( + credentials=credentials + ) + self.logger.info( + f"Scanning GCP organization {provider_settings.organization_id}" + ) + await super().scan( + provider_settings, security_center_client=security_center_client ) - self.logger.info(f"Scanning GCP organization {self.organization_id}") - super().scan() except Exception as e: self.logger.error( - f"Unable to scan GCP organization {self.organization_id}. Error: {e}", + f"Unable to scan GCP organization {provider_settings.organization_id}. Error: {e}", ) self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) - def scan_all(self): + async def scan_all(self): """Scan all Gcp Organizations.""" provider_settings: dict[ tuple, GcpSpecificSettings ] = self.settings.providers.get(self.provider, {}) for provider_setting in provider_settings.values(): - self.provider_settings = provider_setting - self.organization_id = provider_setting.organization_id - self.scan() + await self.scan(provider_setting) - def format_label(self, result: ListAssetsResponse.ListAssetsResult) -> str: + def format_label( + self, + provider_settings: GcpSpecificSettings, + result: ListAssetsResponse.ListAssetsResult, + ) -> str: """Format Gcp label. Args: + provider_settings (GcpSpecificSettings): Gcp settings. result (ListAssetsResponse.ListAssetsResult): Gcp asset result. Returns: @@ -117,46 +126,50 @@ def format_label(self, result: ListAssetsResponse.ListAssetsResult) -> str: asset_project_display_name = ( result.asset.security_center_properties.resource_project_display_name ) - return f"{self.label_prefix}{self.organization_id}/{asset_project_display_name}" + return f"{self.label_prefix}{provider_settings.organization_id}/{asset_project_display_name}" - def list_assets(self, filter: Optional[str] = None) -> ListAssetsPager: + async def list_assets( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + filter: Optional[str] = None, + ) -> ListAssetsAsyncPager: """List Gcp assets. Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. filter (Optional[str]): Filter string. Returns: - ListAssetsPager: Gcp assets. 
+ ListAssetsAsyncPager: Gcp assets. """ request = { - "parent": self.provider_settings.parent(), + "parent": provider_settings.parent(), } if filter: request["filter"] = filter - return self.security_center_client.list_assets(request=request) + return await security_center_client.list_assets(request=request) - def get_asset_dict( - self, list_assets_result: ListAssetsResponse.ListAssetsResult - ) -> dict: - """Get Gcp asset dict. + async def get_compute_instances( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp compute instances assets. Args: - list_assets_result (ListAssetsResponse.ListAssetsResult): Gcp asset result. - - Returns: - dict: Gcp asset dict. + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. """ - return { - "asset": list_assets_result.asset, - "label": self.format_label(list_assets_result), - } - - def get_compute_instances(self): - """Get Gcp compute instances assets.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE.filter() + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: if network_interfaces := list_assets_result.asset.resource_properties.get( "networkInterfaces" ): @@ -183,31 +196,60 @@ def get_compute_instances(self): with SuppressValidationError(): ip_seed = IpSeed( value=ip_address, - label=self.format_label(list_assets_result), + label=self.format_label( + provider_settings, list_assets_result + ), ) - self.add_seed(ip_seed) + self.add_seed(ip_seed, service=current_service) - def get_compute_addresses(self): - """Get Gcp ip address assets.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS.filter() + async def get_compute_addresses( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp ip address assets. + + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. 
+ """ + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: if ip_address := list_assets_result.asset.resource_properties.get( "address" ): with SuppressValidationError(): ip_seed = IpSeed( - value=ip_address, label=self.format_label(list_assets_result) + value=ip_address, + label=self.format_label(provider_settings, list_assets_result), ) - self.add_seed(ip_seed) + self.add_seed(ip_seed, service=current_service) - def get_container_clusters(self): - """Get Gcp container clusters.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER.filter() + async def get_container_clusters( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp container clusters. + + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. + """ + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: if private_cluster_config := list_assets_result.asset.resource_properties.get( "privateClusterConfig" ): @@ -222,16 +264,31 @@ def get_container_clusters(self): with SuppressValidationError(): ip_seed = IpSeed( value=ip_address, - label=self.format_label(list_assets_result), + label=self.format_label( + provider_settings, list_assets_result + ), ) - self.add_seed(ip_seed) + self.add_seed(ip_seed, service=current_service) - def get_cloud_sql_instances(self): - """Get Gcp cloud sql instances.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE.filter() + async def get_cloud_sql_instances( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp cloud sql instances. + + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. 
+ """ + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: if ip_addresses := list_assets_result.asset.resource_properties.get( "ipAddresses" ): @@ -242,32 +299,61 @@ def get_cloud_sql_instances(self): with SuppressValidationError(): ip_seed = IpSeed( value=ip_address, - label=self.format_label(list_assets_result), + label=self.format_label( + provider_settings, list_assets_result + ), ) - self.add_seed(ip_seed) + self.add_seed(ip_seed, service=current_service) - def get_dns_records(self): - """Get Gcp dns records.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.DNS_ZONE.filter() + async def get_dns_records( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp dns records. + + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. + """ + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.DNS_ZONE.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: resource_properties = list_assets_result.asset.resource_properties if resource_properties.get("visibility") == "PUBLIC" and ( domain := resource_properties.get("dnsName") ): with SuppressValidationError(): domain_seed = DomainSeed( - value=domain, label=self.format_label(list_assets_result) + value=domain, + label=self.format_label(provider_settings, list_assets_result), ) - self.add_seed(domain_seed) + self.add_seed(domain_seed, service=current_service) - def get_storage_buckets(self): - """Get Gcp storage buckets.""" - list_assets_results = self.list_assets( - filter=GcpSecurityCenterResourceTypes.STORAGE_BUCKET.filter() + async def get_storage_buckets( + self, + provider_settings: GcpSpecificSettings, + security_center_client: securitycenter_v1.SecurityCenterAsyncClient, + current_service: GcpSecurityCenterResourceTypes, + ): + """Get Gcp storage buckets. + + Args: + provider_settings (GcpSpecificSettings): Gcp settings. + security_center_client (securitycenter_v1.SecurityCenterAsyncClient): Gcp security center client. + current_service (GcpSecurityCenterResourceTypes): Gcp security center resource type. 
+ """ + list_assets_results = await self.list_assets( + provider_settings, + security_center_client, + filter=GcpSecurityCenterResourceTypes.STORAGE_BUCKET.filter(), ) - for list_assets_result in list_assets_results: + async for list_assets_result in list_assets_results: resource_properties = list_assets_result.asset.resource_properties if (bucket_name := resource_properties.get("id")) and ( project_number := resource_properties.get("projectNumber") @@ -285,8 +371,8 @@ def get_storage_buckets(self): bucket_asset = GcpStorageBucketAsset( # TODO: Update when API can accept other urls value=f"https://storage.googleapis.com/{bucket_name}", - uid=self.format_label(list_assets_result), + uid=self.format_label(provider_settings, list_assets_result), # Cast project_number to int from float scan_data=scan_data, ) - self.add_cloud_asset(bucket_asset) + self.add_cloud_asset(bucket_asset, service=current_service) From 19fd5397c17e0199cb92dd335bc14de62bd7ef16 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 13 Jun 2023 11:02:31 -0400 Subject: [PATCH 02/19] chore: typing tooling --- .vscode/extensions.json | 3 ++- .vscode/settings.json | 2 -- poetry.lock | 4 ++-- pyproject.toml | 3 ++- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 3fa987f..c550278 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -10,6 +10,7 @@ "ms-python.python", "njpwerner.autodocstring", "redhat.vscode-yaml", - "streetsidesoftware.code-spell-checker" + "streetsidesoftware.code-spell-checker", + "ms-python.mypy-type-checker" ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index eb3f2d1..f753f57 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,8 +6,6 @@ "python.linting.enabled": true, "python.linting.flake8Enabled": true, "python.linting.flake8Path": "${workspaceFolder}/.venv/bin/flake8", - "python.linting.mypyEnabled": true, - "python.linting.mypyPath": "${workspaceFolder}/.venv/bin/mypy", "python.poetryPath": "poetry", "python.terminal.activateEnvironment": true, "python.testing.pytestArgs": ["--no-cov"], diff --git a/poetry.lock b/poetry.lock index 4952112..6567c93 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6287,5 +6287,5 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "f0ab89fbd5465bbb4a546cd0edcfcabd96cc11df42526ad333353cb0c765cd35" +python-versions = ">=3.9,<4.0" +content-hash = "5faff16b7efedb18460d56b8f53efd523a392b2594b9560c0bd82084b9cb755c" diff --git a/pyproject.toml b/pyproject.toml index 82000db..cbcc93e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ classifiers = [ censys-cc = "censys.cloud_connectors.common.cli:main" [tool.poetry.dependencies] -python = "^3.9" +python = ">=3.9,<4.0" aiometer = "^0.4.0" backoff = "^2.2.1" censys = "^2.2.2" @@ -143,6 +143,7 @@ python_version = "3.9" strict_optional = true warn_redundant_casts = true warn_unused_configs = true +check_untyped_defs = true [[tool.mypy.overrides]] module = [ From 1c6edc04174c0bfd25d400f814b584738945161f Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 20 Jun 2023 12:28:38 -0400 Subject: [PATCH 03/19] feat: make aws async --- .vscode/settings.json | 1 + .../aws_connector/connector.py | 1241 ++++++++++------- .../aws_connector/credentials.py | 234 ++++ .../aws_connector/settings.py | 4 +- .../common/cli/commands/scan.py | 1 - .../gcp_connector/connector.py | 4 +- 
templates/aws/stackset_role_deploy.json | 18 +- 7 files changed, 1029 insertions(+), 474 deletions(-) create mode 100644 src/censys/cloud_connectors/aws_connector/credentials.py diff --git a/.vscode/settings.json b/.vscode/settings.json index f753f57..4528ffe 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -15,6 +15,7 @@ "pythonTestExplorer.testFramework": "pytest", "cSpell.enabled": true, "cSpell.words": [ + "aiobotocore", "apigatewayv", "autoupdate", "autouse", diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index fa7493b..65b3d84 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -1,36 +1,26 @@ """AWS Cloud Connector.""" import contextlib -from collections.abc import Generator, Sequence -from typing import Any, Optional, TypeVar +from collections.abc import Sequence +from typing import AsyncGenerator, List, Optional -import boto3 -import botocore +from aiobotocore.session import get_session from botocore.exceptions import ClientError -from mypy_boto3_apigateway import APIGatewayClient -from mypy_boto3_apigatewayv2 import ApiGatewayV2Client -from mypy_boto3_ec2 import EC2Client from mypy_boto3_ec2.type_defs import ( FilterTypeDef, NetworkInterfaceTypeDef, TagDescriptionTypeDef, TagTypeDef, ) -from mypy_boto3_ecs import ECSClient -from mypy_boto3_elb import ElasticLoadBalancingClient -from mypy_boto3_elbv2 import ElasticLoadBalancingv2Client -from mypy_boto3_rds import RDSClient -from mypy_boto3_route53 import Route53Client -from mypy_boto3_s3 import S3Client -from mypy_boto3_sts import STSClient -from mypy_boto3_sts.type_defs import CredentialsTypeDef - -from censys.cloud_connectors.aws_connector.enums import ( - AwsDefaults, - AwsResourceTypes, - AwsServices, - SeedLabel, -) -from censys.cloud_connectors.aws_connector.settings import AwsSpecificSettings +from types_aiobotocore_apigateway.client import APIGatewayClient +from types_aiobotocore_apigatewayv2.client import ApiGatewayV2Client +from types_aiobotocore_ec2.client import EC2Client +from types_aiobotocore_ecs.client import ECSClient +from types_aiobotocore_elb.client import ElasticLoadBalancingClient +from types_aiobotocore_elbv2.client import ElasticLoadBalancingv2Client +from types_aiobotocore_rds.client import RDSClient +from types_aiobotocore_route53.client import Route53Client +from types_aiobotocore_s3.client import S3Client + from censys.cloud_connectors.common.cloud_asset import AwsStorageBucketAsset from censys.cloud_connectors.common.connector import CloudConnector from censys.cloud_connectors.common.context import SuppressValidationError @@ -39,7 +29,9 @@ from censys.cloud_connectors.common.seed import DomainSeed, IpSeed from censys.cloud_connectors.common.settings import Settings -T = TypeVar("T", bound="botocore.client.BaseClient") +from .credentials import AwsCredentials, get_aws_credentials +from .enums import AwsResourceTypes, SeedLabel +from .settings import AwsSpecificSettings VALID_RECORD_TYPES = ["A", "CNAME"] IGNORED_TAGS = ["censys-cloud-connector-ignore"] @@ -56,19 +48,8 @@ class AwsCloudConnector(CloudConnector): provider = ProviderEnum.AWS provider_settings: AwsSpecificSettings - # Temporary STS credentials created with Assume Role will be stored here during - # a connector scan. - temp_sts_cred: Optional[dict] = None - - # When scanning, the current loaded credential will be set here. 
- credential: dict = {} - account_number: str - region: Optional[str] - - # Current set of ignored tags (combined set of user settings + overall settings) - ignored_tags: list[str] - global_ignored_tags: set[str] + ignore_tags: List[str] def __init__(self, settings: Settings): """Initialize AWS Cloud Connectors. @@ -89,311 +70,514 @@ def __init__(self, settings: Settings): AwsResourceTypes.STORAGE_BUCKET: self.get_s3_instances, } - self.ignored_tags = [] + self.ignored_tags: List[str] = [] self.global_ignored_tags: set[str] = set(IGNORED_TAGS) - def scan(self): - """Scan AWS.""" + async def scan( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + ): + """Scan AWS. + + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + """ self.logger.info( - f"Scanning AWS account {self.account_number} in region {self.region}" + f"Scanning AWS account {self.account_number} in region {region}" + ) + await super().scan( + provider_setting, + credentials=credentials, + region=region, + ignored_tags=ignored_tags, ) - super().scan() - def scan_all(self): + async def scan_all(self): """Scan all configured AWS provider accounts.""" provider_settings: dict[ tuple, AwsSpecificSettings - ] = self.settings.providers.get(self.provider, {}) + ] = self.settings.providers.get( + self.provider, {} + ) # type: ignore for provider_setting in provider_settings.values(): - self.provider_settings = provider_setting - - for credential in self.provider_settings.get_credentials(): - self.credential = credential - self.account_number = credential["account_number"] - self.ignored_tags = self.get_ignored_tags(credential["ignore_tags"]) - - for region in self.provider_settings.regions: - self.temp_sts_cred = None - self.region = region - try: - with Healthcheck( - self.settings, - provider_setting, - provider={ - "region": region, - "account_number": self.account_number, - }, - ): - self.scan() - except Exception as e: - self.logger.error( - f"Unable to scan account {self.account_number} in region {self.region}. Error: {e}" - ) - self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) - self.region = None + if provider_setting.accounts: + for account in provider_setting.accounts: + self.account_number = account.account_number + self.ignored_tags = self.get_ignored_tags(account.ignore_tags) + + for region in provider_setting.regions: + try: + with Healthcheck( + self.settings, + provider_setting, + provider={ + "region": region, + "account_number": self.account_number, + }, + ): + credentials = await get_aws_credentials( + provider_setting, account, region + ) + await self.scan( + provider_setting, + credentials, + region, + ignored_tags=self.ignored_tags, + ) + except Exception as e: + self.logger.error( + f"Unable to scan account {self.account_number} in region {region}. 
Error: {e}" + ) + self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) - def format_label(self, service: AwsServices, region: Optional[str] = None) -> str: + else: + self.account_number = provider_setting.account_number + self.ignored_tags = self.get_ignored_tags(provider_setting.ignore_tags) + + for region in provider_setting.regions: + credentials = await get_aws_credentials( + provider_setting, None, region + ) + await self.scan( + provider_setting, + credentials, + region, + ignored_tags=self.ignored_tags, + ) + # try: + # with Healthcheck( + # self.settings, + # provider_setting, + # provider={ + # "region": region, + # "account_number": self.account_number, + # }, + # ): + # credentials = await get_aws_credentials( + # provider_setting, None, region + # ) + # await self.scan( + # provider_setting, + # credentials, + # region, + # ignored_tags=self.ignored_tags, + # ) + # except Exception as e: + # self.logger.error( + # f"Unable to scan account {self.account_number} in region {region}. Error: {e}" + # ) + # self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) + + def format_label(self, service: SeedLabel, region: Optional[str] = None) -> str: """Format AWS label. Args: - service (AwsServices): AWS Service Type + service (SeedLabel): AWS Service Type region (str): AWS Region override Returns: str: Formatted label. """ - region = region or self.region region_label = f"/{region}" if region != "" else "" return f"AWS: {service} - {self.account_number}{region_label}" - def credentials(self) -> dict: - """Generate required credentials for AWS. - - This method will attempt to use any active STS sessions before falling - back on the regular provider settings. + # async def credentials(self) -> dict: + # """Generate required credentials for AWS. + + # This method will attempt to use any active STS sessions before falling + # back on the regular provider settings. + + # Returns: + # dict: Boto Credential format. + # """ + # # Role name is the credential field which causes STS to activate. + # # Once activated the temporary STS creds will be used by all + # # subsequent AWS service client calls. + # if role_name := self.credential.get("role_name"): + # self.logger.debug(f"Using STS for role {role_name}") + # return await self.get_assume_role_credentials(role_name) + + # self.logger.debug("Using provider settings credentials") + # return self.boto_cred( + # self.region, + # self.provider_settings.access_key, + # self.provider_settings.secret_key, + # self.provider_settings.session_token, + # ) + + # async def get_aws_client_kwargs( + # self, service: AwsServices, credentials: Optional[dict] = None + # ) -> dict: + # """Creates an AWS client for the provided service. + + # Args: + # service (AwsServices): The AWS service name. + # credentials (AwsCredentials): Override credentials instead of using the default. + + # Raises: + # Exception: If the client could not be created. + + # Returns: + # dict: An AWS boto3 client. + # """ + # try: + # credentials = credentials or await self.credentials() + # if credentials.get("aws_access_key_id"): + # self.logger.debug(f"AWS Service {service} using access key credentials") + # return credentials + + # # calling client without credentials follows the standard + # # credential import path to source creds from the environment + # self.logger.debug( + # f"AWS Service {service} using external boto configuration" + # ) + # return {} # type: ignore + # except Exception as e: + # self.logger.error( + # f"Could not connect with client type '{service}'. 
Error: {e}" + # ) + # raise + + # async def get_assume_role_credentials(self, role_name: str) -> dict: + # """Acquire temporary STS credentials and cache them for the duration of the scan. + + # Args: + # role_name (str): Role name. + + # Returns: + # dict: STS credentials. + + # Raises: + # Exception: If the credentials could not be created. + # """ + # if self.temp_sts_cred: + # self.logger.debug("Using cached temporary STS credentials") + # else: + # try: + # temp_creds = await self.assume_role(role_name) + # self.temp_sts_cred = self.boto_cred( + # self.region, + # temp_creds["AccessKeyId"], + # temp_creds["SecretAccessKey"], + # temp_creds["SessionToken"], + # ) + # self.logger.debug( + # f"Created temporary STS credentials for role {role_name}" + # ) + # except Exception as e: + # self.logger.error(f"Failed to assume role: {e}") + # raise + + # return self.temp_sts_cred + + # def boto_cred( + # self, + # region_name: Optional[str] = None, + # access_key: Optional[str] = None, + # secret_key: Optional[str] = None, + # session_token: Optional[str] = None, + # ) -> dict[str, Any]: + # """Create a boto3 credential dict. Only params with values are included. + + # Args: + # region_name (str): AWS region. + # access_key (str): AWS access key. + # secret_key (str): AWS secret key. + # session_token (str): AWS session token. + + # Returns: + # dict: boto3 credential dict. + # """ + # cred = {} + + # if region_name: + # cred["region_name"] = region_name + + # if access_key: + # cred["aws_access_key_id"] = access_key + + # if secret_key: + # cred["aws_secret_access_key"] = secret_key + + # if session_token: + # cred["aws_session_token"] = session_token + + # return cred + + # async def assume_role( + # self, role_name: str = AwsDefaults.ROLE_NAME.value + # ) -> CredentialsTypeDef: + # """Acquire temporary credentials generated by Secure Token Service (STS). + + # This will always use the primary AWS account credentials when querying + # the STS service. + + # Args: + # role_name (str): Role name to assume. Defaults to "CensysCloudConnectorRole". + + # Returns: + # CredentialsTypeDef: Temporary credentials. + # """ + # credentials = self.boto_cred( + # self.region, + # self.provider_settings.access_key, + # self.provider_settings.secret_key, + # self.provider_settings.session_token, + # ) + + # # pass in explicit boto creds to force a new STS session + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.SECURE_TOKEN_SERVICE, # type: ignore + # credentials=credentials, + # ) + # async with get_session().create_client("sts", **aws_kwargs) as client: # type: ignore + # client: STSClient # type: ignore[no-redef] + # role: dict[str, Any] = { + # "RoleArn": f"arn:aws:iam::{self.account_number}:role/{role_name}", + # "RoleSessionName": self.credential["role_session_name"] + # or AwsDefaults.ROLE_SESSION_NAME.value, + # } + + # temp_creds = await client.assume_role(**role) + + # self.logger.debug( + # f"Assume role acquired temporary credentials for role {role_name}" + # ) + # return temp_creds["Credentials"] + + async def get_api_gateway_domains_v1( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve all API Gateway V1 domains and emit seeds. - Returns: - dict: Boto Credential format. + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. 
+ ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. """ - # Role name is the credential field which causes STS to activate. - # Once activated the temporary STS creds will be used by all - # subsequent AWS service client calls. - if role_name := self.credential.get("role_name"): - self.logger.debug(f"Using STS for role {role_name}") - return self.get_assume_role_credentials(role_name) - - self.logger.debug("Using provider settings credentials") - return self.boto_cred( - self.region, - self.provider_settings.access_key, - self.provider_settings.secret_key, - self.provider_settings.session_token, - ) + label = self.format_label(SeedLabel.API_GATEWAY, region) - def get_aws_client( - self, service: AwsServices, credentials: Optional[dict] = None - ) -> T: - """Creates an AWS client for the provided service. + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.API_GATEWAY, # type: ignore + # credentials=credentials, + # ) + async with get_session().create_client( + "apigateway", **credentials + ) as client: # type: ignore + client: APIGatewayClient # type: ignore[no-redef] - Args: - service (AwsServices): The AWS service name. - credentials (dict): Override credentials instead of using the default. - - Raises: - Exception: If the client could not be created. + try: + apis = await client.get_rest_apis() + for domain in apis.get("items", []): + domain_id = domain["id"] + domain_name = f"{domain_id}.execute-api.{region}.amazonaws.com" + with SuppressValidationError(): + domain_seed = DomainSeed(value=domain_name, label=label) + self.add_seed(domain_seed, api_gateway_res=domain) + except ClientError as e: + self.logger.error(f"Could not connect to API Gateway V1. Error: {e}") - Returns: - T: An AWS boto3 client. - """ - try: - credentials = credentials or self.credentials() - if credentials.get("aws_access_key_id"): - self.logger.debug(f"AWS Service {service} using access key credentials") - return boto3.client(service, **credentials) # type: ignore - - # calling client without credentials follows the standard - # credential import path to source creds from the environment - self.logger.debug( - f"AWS Service {service} using external boto configuration" - ) - return boto3.client(service) # type: ignore - except Exception as e: - self.logger.error( - f"Could not connect with client type '{service}'. Error: {e}" - ) - raise - - def get_assume_role_credentials(self, role_name: Optional[str] = None) -> dict: - """Acquire temporary STS credentials and cache them for the duration of the scan. + async def get_api_gateway_domains_v2( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve API Gateway V2 domains and emit seeds. Args: - role_name (str): Role name. + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. + """ + label = self.format_label(SeedLabel.API_GATEWAY, region) - Returns: - dict: STS credentials. 
+ # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.API_GATEWAY_V2, # type: ignore + # credentials=credentials, + # ) + async with get_session().create_client( + "apigatewayv2", **credentials + ) as client: # type: ignore + client: ApiGatewayV2Client # type: ignore[no-redef] - Raises: - Exception: If the credentials could not be created. - """ - if self.temp_sts_cred: - self.logger.debug("Using cached temporary STS credentials") - else: try: - temp_creds = self.assume_role(role_name) - self.temp_sts_cred = self.boto_cred( - self.region, - temp_creds["AccessKeyId"], - temp_creds["SecretAccessKey"], - temp_creds["SessionToken"], - ) - self.logger.debug( - f"Created temporary STS credentials for role {role_name}" - ) - except Exception as e: - self.logger.error(f"Failed to assume role: {e}") - raise - - return self.temp_sts_cred + apis = await client.get_apis() + for domain in apis.get("Items", []): + domain_name = domain["ApiEndpoint"].split("//")[1] + with SuppressValidationError(): + domain_seed = DomainSeed(value=domain_name, label=label) + self.add_seed(domain_seed, api_gateway_res=domain) + except ClientError as e: + self.logger.error(f"Could not connect to API Gateway V2. Error: {e}") - def boto_cred( + async def get_api_gateway_domains( self, - region_name: str = None, - access_key: str = None, - secret_key: str = None, - session_token: str = None, - ) -> dict[str, Any]: - """Create a boto3 credential dict. Only params with values are included. + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve all versions of Api Gateway data and emit seeds. Args: - region_name (str): AWS region. - access_key (str): AWS access key. - secret_key (str): AWS secret key. - session_token (str): AWS session token. - - Returns: - dict: boto3 credential dict. + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. """ - cred = {} + await self.get_api_gateway_domains_v1( + provider_setting, credentials, region, ignored_tags, current_service + ) + await self.get_api_gateway_domains_v2( + provider_setting, credentials, region, ignored_tags, current_service + ) - if region_name: - cred["region_name"] = region_name + async def get_load_balancers_v1( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Elastic Load Balancers (ELB) V1 data and emit seeds. - if access_key: - cred["aws_access_key_id"] = access_key + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. 
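The API Gateway hunks above replace boto3's synchronous clients with aiobotocore clients that are opened as async context managers and awaited per call. A minimal, standalone sketch of that pattern, assuming ambient environment credentials and a placeholder region (the connector passes an explicit credentials dict instead):

import asyncio

from aiobotocore.session import get_session


async def list_api_gateway_domains(region: str = "us-east-1") -> list[str]:
    """Collect execute-api hostnames for REST APIs in one region (illustrative)."""
    domains: list[str] = []
    # Credentials come from the environment here; first page of results only,
    # mirroring the hunk above.
    async with get_session().create_client("apigateway", region_name=region) as client:
        apis = await client.get_rest_apis()
        for api in apis.get("items", []):
            domains.append(f"{api['id']}.execute-api.{region}.amazonaws.com")
    return domains


if __name__ == "__main__":
    print(asyncio.run(list_api_gateway_domains()))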
+ """ + label = self.format_label(SeedLabel.LOAD_BALANCER, region) + + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.LOAD_BALANCER, # type: ignore + # credentials=credentials, + # ) + async with get_session().create_client( + "elb", + **credentials, + ) as client: # type: ignore + client: ElasticLoadBalancingClient # type: ignore[no-redef] - if secret_key: - cred["aws_secret_access_key"] = secret_key + try: + data = await client.describe_load_balancers() + for elb in data.get("LoadBalancerDescriptions", []): + if value := elb.get("DNSName"): + with SuppressValidationError(): + domain_seed = DomainSeed(value=value, label=label) + self.add_seed(domain_seed, elb_res=elb, aws_client=client) + except ClientError as e: + self.logger.error(f"Could not connect to ELB V1. Error: {e}") - if session_token: - cred["aws_session_token"] = session_token + async def get_load_balancers_v2( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Elastic Load Balancers (ELB) V2 data and emit seeds. - return cred + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. + """ + label = self.format_label(SeedLabel.LOAD_BALANCER, region) + + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.LOAD_BALANCER_V2, # type: ignore + # credentials=credentials, + # ) + async with get_session().create_client( + "elbv2", + **credentials, + ) as client: # type: ignore + client: ElasticLoadBalancingv2Client # type: ignore[no-redef] - def assume_role( - self, role_name: Optional[str] = AwsDefaults.ROLE_NAME.value - ) -> CredentialsTypeDef: - """Acquire temporary credentials generated by Secure Token Service (STS). + try: + data = await client.describe_load_balancers() + for elb in data.get("LoadBalancers", []): + if value := elb.get("DNSName"): + with SuppressValidationError(): + domain_seed = DomainSeed(value=value, label=label) + self.add_seed(domain_seed, elb_res=elb, aws_client=client) + except ClientError as e: + self.logger.error(f"Could not connect to ELB V2. Error: {e}") - This will always use the primary AWS account credentials when querying - the STS service. + async def get_load_balancers( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Elastic Load Balancers (ELB) data and emit seeds. Args: - role_name (str, optional): Role name to assume. Defaults to "CensysCloudConnectorRole". - - Returns: - CredentialsTypeDef: Temporary credentials. + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. 
""" - credentials = self.boto_cred( - self.region, - self.provider_settings.access_key, - self.provider_settings.secret_key, - self.provider_settings.session_token, + await self.get_load_balancers_v1( + provider_setting, credentials, region, ignored_tags, current_service ) - - # pass in explicit boto creds to force a new STS session - client: STSClient = self.get_aws_client( - service=AwsServices.SECURE_TOKEN_SERVICE, # type: ignore - credentials=credentials, + await self.get_load_balancers_v2( + provider_setting, credentials, region, ignored_tags, current_service ) - role_session = ( - self.credential["role_session_name"] or AwsDefaults.ROLE_SESSION_NAME.value - ) - role: dict[str, Any] = { - "RoleArn": f"arn:aws:iam::{self.account_number}:role/{role_name}", - "RoleSessionName": role_session, - } - - temp_creds = client.assume_role(**role) + async def get_network_interfaces( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve EC2 Elastic Network Interfaces (ENI) data and emit seeds. - self.logger.debug( - f"Assume role acquired temporary credentials for role {role_name}" - ) - return temp_creds["Credentials"] - - def get_api_gateway_domains_v1(self): - """Retrieve all API Gateway V1 domains and emit seeds.""" - client: APIGatewayClient = self.get_aws_client(service=AwsServices.API_GATEWAY) - label = self.format_label(SeedLabel.API_GATEWAY) - - try: - apis = client.get_rest_apis() - for domain in apis.get("items", []): - domain_name = f"{domain['id']}.execute-api.{self.region}.amazonaws.com" - with SuppressValidationError(): - domain_seed = DomainSeed(value=domain_name, label=label) - self.add_seed(domain_seed, api_gateway_res=domain) - except ClientError as e: - self.logger.error(f"Could not connect to API Gateway V1. Error: {e}") - - def get_api_gateway_domains_v2(self): - """Retrieve API Gateway V2 domains and emit seeds.""" - client: ApiGatewayV2Client = self.get_aws_client( - service=AwsServices.API_GATEWAY_V2 - ) - label = self.format_label(SeedLabel.API_GATEWAY) - - try: - apis = client.get_apis() - for domain in apis.get("Items", []): - domain_name = domain["ApiEndpoint"].split("//")[1] - with SuppressValidationError(): - domain_seed = DomainSeed(value=domain_name, label=label) - self.add_seed(domain_seed, api_gateway_res=domain) - except ClientError as e: - self.logger.error(f"Could not connect to API Gateway V2. Error: {e}") - - def get_api_gateway_domains(self): - """Retrieve all versions of Api Gateway data and emit seeds.""" - self.get_api_gateway_domains_v1() - self.get_api_gateway_domains_v2() - - def get_load_balancers_v1(self): - """Retrieve Elastic Load Balancers (ELB) V1 data and emit seeds.""" - client: ElasticLoadBalancingClient = self.get_aws_client( - service=AwsServices.LOAD_BALANCER - ) - label = self.format_label(SeedLabel.LOAD_BALANCER) + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. 
+ """ + label = self.format_label(SeedLabel.NETWORK_INTERFACE, region) - try: - data = client.describe_load_balancers() - for elb in data.get("LoadBalancerDescriptions", []): - if value := elb.get("DNSName"): - with SuppressValidationError(): - domain_seed = DomainSeed(value=value, label=label) - self.add_seed(domain_seed, elb_res=elb, aws_client=client) - except ClientError as e: - self.logger.error(f"Could not connect to ELB V1. Error: {e}") - - def get_load_balancers_v2(self): - """Retrieve Elastic Load Balancers (ELB) V2 data and emit seeds.""" - client: ElasticLoadBalancingv2Client = self.get_aws_client( - service=AwsServices.LOAD_BALANCER_V2 + interfaces = await self.describe_network_interfaces( + provider_setting, credentials, region, ignored_tags ) - label = self.format_label(SeedLabel.LOAD_BALANCER) - - try: - data = client.describe_load_balancers() - for elb in data.get("LoadBalancers", []): - if value := elb.get("DNSName"): - with SuppressValidationError(): - domain_seed = DomainSeed(value=value, label=label) - self.add_seed(domain_seed, elb_res=elb, aws_client=client) - except ClientError as e: - self.logger.error(f"Could not connect to ELB V2. Error: {e}") - - def get_load_balancers(self): - """Retrieve Elastic Load Balancers (ELB) data and emit seeds.""" - self.get_load_balancers_v1() - self.get_load_balancers_v2() - - def get_network_interfaces(self): - """Retrieve EC2 Elastic Network Interfaces (ENI) data and emit seeds.""" - label = self.format_label(SeedLabel.NETWORK_INTERFACE) - - interfaces = self.describe_network_interfaces() - instance_tags = self.get_resource_tags() + instance_tags = await self.get_resource_tags(credentials) for ip_address, record in interfaces.items(): instance_id = record["InstanceId"] @@ -408,71 +592,108 @@ def get_network_interfaces(self): ip_seed = IpSeed(value=ip_address, label=label) self.add_seed(ip_seed, tags=tags) - def describe_network_interfaces(self) -> dict: + async def describe_network_interfaces( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + ) -> dict: """Retrieve EC2 Elastic Network Interfaces (ENI) data. + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + Returns: dict: Network Interfaces. 
""" - ec2: EC2Client = self.get_aws_client(AwsServices.EC2) interfaces = {} - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.describe_network_interfaces - filters: Sequence[FilterTypeDef] = [ - {"Name": "association.public-ip", "Values": ["*"]} - ] - - try: - data = ec2.describe_network_interfaces(Filters=filters) - for network in data.get("NetworkInterfaces", {}): - network_interface_id = network.get("NetworkInterfaceId") - instance_id = network.get("Attachment", {}).get("InstanceId") + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.EC2, # type: ignore + # credentials=credentials, + # ) + async with get_session().create_client( + "ec2", + **credentials, + ) as ec2: # type: ignore + ec2: EC2Client # type: ignore[no-redef] + + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.describe_network_interfaces + filters: Sequence[FilterTypeDef] = [ + {"Name": "association.public-ip", "Values": ["*"]} + ] - if self.network_interfaces_ignored_tags(network): - self.logger.debug( - f"Skipping ignored tag for network interface {network_interface_id}" - ) - continue + try: + data = await ec2.describe_network_interfaces(Filters=filters) + for network in data.get("NetworkInterfaces", {}): + network_interface_id = network.get("NetworkInterfaceId") + instance_id = network.get("Attachment", {}).get("InstanceId") + + if self.network_interfaces_ignored_tags(network): + self.logger.debug( + f"Skipping ignored tag for network interface {network_interface_id}" + ) + continue - for addresses in network.get("PrivateIpAddresses", []): - if ip_address := addresses.get("Association", {}).get("PublicIp"): - interfaces[ip_address] = { - "NetworkInterfaceId": network_interface_id, - "InstanceId": instance_id, - } - except ClientError as e: - self.logger.error(f"Could not connect to ENI Service. Error: {e}") + for addresses in network.get("PrivateIpAddresses", []): + if ip_address := addresses.get("Association", {}).get( + "PublicIp" + ): + interfaces[ip_address] = { + "NetworkInterfaceId": network_interface_id, + "InstanceId": instance_id, + } + except ClientError as e: + self.logger.error(f"Could not connect to ENI Service. Error: {e}") return interfaces - def get_resource_tags_paginated( - self, resource_types: list[str] = None - ) -> Generator[TagDescriptionTypeDef, None, None]: + async def get_resource_tags_paginated( + self, credentials: AwsCredentials, resource_types: Optional[list[str]] = None + ) -> AsyncGenerator[TagDescriptionTypeDef, None]: """Retrieve EC2 resource tags paginated. Args: + credentials (AwsCredentials): AWS credentials. resource_types (Optional[list[str]]): Resource types. Defaults to None. Yields: - Generator[TagDescriptionTypeDef]: Tags. + AsyncGenerator[TagDescriptionTypeDef]: Tags. 
""" - ec2: EC2Client = self.get_aws_client(AwsServices.EC2) - paginator = ec2.get_paginator( - "describe_tags", - ) - - for page in paginator.paginate( - Filters=[ - {"Name": "resource-type", "Values": resource_types or ["instance"]} - ] - ): - tags = page.get("Tags", []) - yield from tags + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.EC2, # type: ignore + # ) + async with get_session().create_client( + "ec2", + **credentials, + ) as ec2: # type: ignore + ec2: EC2Client # type: ignore[no-redef] - def get_resource_tags(self, resource_types: list[str] = None) -> dict: + try: + async for page in ec2.get_paginator("describe_tags",).paginate( + Filters=[ + { + "Name": "resource-type", + "Values": resource_types or ["instance"], + } + ] # type: ignore + ): + for tag in page.get("Tags", []): # noqa: SIM104 + yield tag + except ClientError as e: + self.logger.error(f"Could not connect to EC2 Service. Error: {e}") + + async def get_resource_tags( + self, credentials: AwsCredentials, resource_types: Optional[list[str]] = None + ) -> dict: """Get EC2 resource tags based on resource types. Args: + credentials (AwsCredentials): AWS credentials. resource_types (list[str]): Resource type filter. Returns: @@ -480,7 +701,7 @@ def get_resource_tags(self, resource_types: list[str] = None) -> dict: """ resource_tags: dict = {} - for tag in self.get_resource_tags_paginated(resource_types): + async for tag in self.get_resource_tags_paginated(credentials, resource_types): # Tags come in two formats: # 1. Tag = { Key = "Name", Value= "actual-tag-name" } # 2. Tag = { Key = "actual-key-name", Value = "tag-value-that-is-unused-here"} @@ -510,127 +731,187 @@ def network_interfaces_ignored_tags(self, data: NetworkInterfaceTypeDef) -> bool tags = self.extract_tags_from_tagset(tag_set) return self.has_ignored_tag(tags) - def get_rds_instances(self): - """Retrieve Relational Database Services (RDS) data and emit seeds.""" - client: RDSClient = self.get_aws_client(service=AwsServices.RDS) - label = self.format_label(SeedLabel.RDS) + async def get_rds_instances( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Relational Database Services (RDS) data and emit seeds. - try: - data = client.describe_db_instances() - for instance in data.get("DBInstances", []): - if not instance.get("PubliclyAccessible"): - continue + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. + """ + label = self.format_label(SeedLabel.RDS, region) - if domain_name := instance.get("Endpoint", {}).get("Address"): - with SuppressValidationError(): - domain_seed = DomainSeed(value=domain_name, label=label) - self.add_seed(domain_seed, rds_res=instance) - except ClientError as e: - self.logger.error(f"Could not connect to RDS Service. 
Error: {e}") + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.RDS, # type: ignore + # ) + async with get_session().create_client( + "rds", + **credentials, + ) as client: # type: ignore + client: RDSClient # type: ignore[no-redef] + + try: + data = await client.describe_db_instances() + for instance in data.get("DBInstances", []): + if not instance.get("PubliclyAccessible"): + continue - def _get_route53_zone_hosts(self, client: botocore.client.BaseClient) -> dict: - """Retrieve Route 53 Zone hosts. + if domain_name := instance.get("Endpoint", {}).get("Address"): + with SuppressValidationError(): + domain_seed = DomainSeed(value=domain_name, label=label) + self.add_seed(domain_seed, rds_res=instance) + except ClientError as e: + self.logger.error(f"Could not connect to RDS Service. Error: {e}") - Args: - client (botocore.client.BaseClient): Route53 Client + async def get_route53_zones( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Route 53 Zones and emit seeds. - Returns: - dict: Hosted Zones + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. """ - return client.get_paginator("list_hosted_zones").paginate().build_full_result() + label = self.format_label(SeedLabel.ROUTE53_ZONES, region) - def _get_route53_zone_resources( - self, client: botocore.client.BaseClient, hosted_zone_id - ) -> dict: - """Retrieve Route 53 Zone resources. + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.ROUTE53_ZONES, # type: ignore + # ) + async with get_session().create_client( + "route53", + **credentials, + ) as client: # type: ignore + client: Route53Client # type: ignore[no-redef] - Args: - client (botocore.client.BaseClient): Route53 client - hosted_zone_id (str): Hosted Zone Id + try: + async for zones in client.get_paginator("list_hosted_zones").paginate(): + for zone in zones.get("HostedZones", []): + if not zone or zone.get("Config", {}).get("PrivateZone"): + continue - Returns: - dict: Resource Record Sets. + # Add the zone itself as a seed + domain_name = zone["Name"].rstrip(".") + with SuppressValidationError(): + domain_seed = DomainSeed(value=domain_name, label=label) + self.add_seed( + domain_seed, route53_zone_res=zone, aws_client=client + ) + + hosted_zone_id = zone["Id"] + async for resource_sets in client.get_paginator( + "list_resource_record_sets" + ).paginate( + HostedZoneId=hosted_zone_id, + # StartRecordName="*", + ): + for resource_set in resource_sets.get( + "ResourceRecordSets", [] + ): + if resource_set.get("Type") not in VALID_RECORD_TYPES: + continue + + domain_name = resource_set["Name"].rstrip(".") + with SuppressValidationError(): + domain_seed = DomainSeed( + value=domain_name, label=label + ) + self.add_seed( + domain_seed, + route53_zone_res=zone, + aws_client=client, + ) + except ClientError as e: + self.logger.error(f"Could not connect to Route 53 Zones. Error: {e}") + + async def get_ecs_instances( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Elastic Container Service data and emit seeds. 
+ + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. """ - return ( - client.get_paginator("list_resource_record_sets") - .paginate( - HostedZoneId=hosted_zone_id, - StartRecordName="*", - ) - .build_full_result() - ) + label = self.format_label(SeedLabel.ECS, region) + + # ecs_aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.ECS, # type: ignore + # ) + # ec2_aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.EC2, # type: ignore + # ) + async with get_session().create_client( + "ecs", + **credentials, + ) as ecs, get_session().create_client( + "ec2", + **credentials, + ) as ec2: # type: ignore + ecs: ECSClient # type: ignore[no-redef] + ec2: EC2Client # type: ignore[no-redef] - def get_route53_zones(self): - """Retrieve Route 53 Zones and emit seeds.""" - client: Route53Client = self.get_aws_client(service=AwsServices.ROUTE53_ZONES) - label = self.format_label(SeedLabel.ROUTE53_ZONES) - - try: - zones = self._get_route53_zone_hosts(client) - for zone in zones.get("HostedZones", []): - if zone.get("Config", {}).get("PrivateZone"): - continue - - # Add the zone itself as a seed - domain_name = zone.get("Name").rstrip(".") - with SuppressValidationError(): - domain_seed = DomainSeed(value=domain_name, label=label) - self.add_seed(domain_seed, route53_zone_res=zone, aws_client=client) - - id = zone.get("Id") - resource_sets = self._get_route53_zone_resources(client, id) - for resource_set in resource_sets.get("ResourceRecordSets", []): - if resource_set.get("Type") not in VALID_RECORD_TYPES: + try: + clusters = await ecs.list_clusters() + for cluster in clusters.get("clusterArns", []): + cluster_instances = await ecs.list_container_instances( + cluster=cluster + ) + containers = cluster_instances.get("containerInstanceArns", []) + if len(containers) == 0: continue - domain_name = resource_set.get("Name").rstrip(".") - with SuppressValidationError(): - domain_seed = DomainSeed(value=domain_name, label=label) - self.add_seed( - domain_seed, route53_zone_res=zone, aws_client=client - ) - except ClientError as e: - self.logger.error(f"Could not connect to Route 53 Zones. 
Error: {e}") - - def get_ecs_instances(self): - """Retrieve Elastic Container Service data and emit seeds.""" - ecs: ECSClient = self.get_aws_client(AwsServices.ECS) - ec2: EC2Client = self.get_aws_client(AwsServices.EC2) - label = self.format_label(SeedLabel.ECS) - - try: - clusters = ecs.list_clusters() - for cluster in clusters.get("clusterArns", []): - cluster_instances = ecs.list_container_instances(cluster=cluster) - containers = cluster_instances.get("containerInstanceArns", []) - if len(containers) == 0: - continue - - instances = ecs.describe_container_instances( - cluster=cluster, containerInstances=containers - ) - - instance_ids = [ - i.get("ec2InstanceId") - for i in instances.get("containerInstances", []) - ] - if not instance_ids: - continue - - descriptions = ec2.describe_instances(InstanceIds=instance_ids) - for reservation in descriptions.get("Reservations", []): - for instance in reservation.get("Instances", []): - ip_address = instance.get("PublicIpAddress") - if not ip_address: - continue + instances = await ecs.describe_container_instances( + cluster=cluster, containerInstances=containers + ) - with SuppressValidationError(): - ip_seed = IpSeed(value=ip_address, label=label) - self.add_seed(ip_seed, ecs_res=instance) - except ClientError as e: - self.logger.error(f"Could not connect to ECS. Error: {e}") + instance_ids = [ + i["ec2InstanceId"] + for i in instances.get("containerInstances", []) + ] + if not instance_ids: + continue - def get_s3_region(self, client: S3Client, bucket: str) -> str: + descriptions = await ec2.describe_instances( + InstanceIds=instance_ids + ) + for reservation in descriptions.get("Reservations", []): + for instance in reservation.get("Instances", []): + ip_address = instance.get("PublicIpAddress") + if not ip_address: + continue + + with SuppressValidationError(): + ip_seed = IpSeed(value=ip_address, label=label) + self.add_seed(ip_seed, ecs_res=instance) + except ClientError as e: + self.logger.error(f"Could not connect to ECS. Error: {e}") + + async def get_s3_region(self, client: S3Client, bucket: str) -> str: """Lookup S3 bucket location. Args: @@ -640,38 +921,60 @@ def get_s3_region(self, client: S3Client, bucket: str) -> str: Returns: str: Bucket location (or us-east-1 for legacy buckets) """ - location = client.get_bucket_location(Bucket=bucket)["LocationConstraint"] - return location or "us-east-1" - - def get_s3_instances(self): - """Retrieve Simple Storage Service data and emit seeds.""" - client: S3Client = self.get_aws_client(service=AwsServices.STORAGE_BUCKET) - - try: - data = client.list_buckets().get("Buckets", []) - for bucket in data: - bucket_name = bucket.get("Name") - if not bucket_name: - continue - - region = self.get_s3_region(client, bucket_name) - label = self.format_label(SeedLabel.STORAGE_BUCKET, region) - - with SuppressValidationError(): - bucket_asset = AwsStorageBucketAsset( - value=AwsStorageBucketAsset.url(bucket_name, region), - uid=label, - scan_data={ - "accountNumber": self.account_number, - }, - ) - self.add_cloud_asset( - bucket_asset, bucket_name=bucket_name, aws_client=client - ) - except ClientError as e: - self.logger.error(f"Could not connect to S3. 
Error: {e}") + location = await client.get_bucket_location(Bucket=bucket) + return location.get("LocationConstraint") or "us-east-1" + + async def get_s3_instances( + self, + provider_setting: AwsSpecificSettings, + credentials: AwsCredentials, + region: str, + ignored_tags: list[str], + current_service: str, + ) -> None: + """Retrieve Simple Storage Service data and emit seeds. + + Args: + provider_setting (AwsSpecificSettings): AWS provider settings. + credentials (AwsCredentials): AWS credentials. + region (str): AWS region. + ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. + current_service (str): Current service. + """ + # aws_kwargs = await self.get_aws_client_kwargs( + # service=AwsServices.STORAGE_BUCKET, # type: ignore + # ) + async with get_session().create_client( + "s3", + **credentials, + ) as client: # type: ignore + client: S3Client # type: ignore[no-redef] + + try: + data = await client.list_buckets() + for bucket in data.get("Buckets", []): + bucket_name = bucket.get("Name") + if not bucket_name: + continue + + region = await self.get_s3_region(client, bucket_name) + label = self.format_label(SeedLabel.STORAGE_BUCKET, region) + + with SuppressValidationError(): + bucket_asset = AwsStorageBucketAsset( # type: ignore + value=AwsStorageBucketAsset.url(bucket_name, region), + uid=label, + scan_data={ + "accountNumber": self.account_number, + }, + ) + self.add_cloud_asset( + bucket_asset, bucket_name=bucket_name, aws_client=client + ) + except ClientError as e: + self.logger.error(f"Could not connect to S3. Error: {e}") - def get_ignored_tags(self, tags: Optional[list[str]] = None): + def get_ignored_tags(self, tags: Optional[list[str]] = None) -> list[str]: """Generate ignored tags based off provider settings and global ignore list. Args: @@ -681,7 +984,7 @@ def get_ignored_tags(self, tags: Optional[list[str]] = None): list[str]: Ignored tags. """ if not tags: - return self.global_ignored_tags + return list(self.global_ignored_tags) ignored = self.global_ignored_tags.copy() ignored.update(tags) diff --git a/src/censys/cloud_connectors/aws_connector/credentials.py b/src/censys/cloud_connectors/aws_connector/credentials.py new file mode 100644 index 0000000..f78329f --- /dev/null +++ b/src/censys/cloud_connectors/aws_connector/credentials.py @@ -0,0 +1,234 @@ +"""AWS credentials.""" + +from typing import Optional, TypedDict + +from aiobotocore.session import get_session +from types_aiobotocore_sts.client import STSClient + +from .settings import AwsAccount, AwsSpecificSettings + + +class StsClientKwargs(TypedDict, total=False): + """STS Client kwargs.""" + + aws_access_key_id: Optional[str] + aws_secret_access_key: Optional[str] + region_name: Optional[str] + + +class AwsCredentials(TypedDict, total=False): + """AWS credentials.""" + + aws_access_key_id: Optional[str] + aws_secret_access_key: Optional[str] + aws_session_token: Optional[str] + region_name: Optional[str] + + +async def get_aws_credentials( + provider_settings: AwsSpecificSettings, + account: Optional[AwsAccount] = None, + region: Optional[str] = None, +) -> AwsCredentials: + """Get AWS credentials. + + Args: + provider_settings (AwsSpecificSettings): The provider settings. + account (Optional[AwsAccount], optional): The account. Defaults to None. + region (Optional[str], optional): The region. Defaults to None. + + Returns: + AwsCredentials: The AWS credentials. 
+ """ + provider_has_credentials = bool( + provider_settings.access_key and provider_settings.secret_key + ) + provider_has_role = bool( + provider_settings.role_name and provider_settings.role_session_name + ) + + # If an account is provided, use it + if account: + account_has_credentials = bool(account.access_key and account.secret_key) + account_has_role = bool(account.role_name and account.role_session_name) + + # If the account has a role and credentials, assume it using the account credentials + if account_has_credentials and account_has_role: + assert account.role_name + assert account.role_session_name + return await assume_role( + account.account_number, + account.role_name, + account.role_session_name, + access_key=account.access_key, + secret_key=account.secret_key, + region=region, + ) + + # If the provider has a role and the account has credentials, assume it using the account credentials + if provider_has_role and account_has_credentials: + assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + account.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + access_key=account.access_key, + secret_key=account.secret_key, + region=region, + ) + + # If the account has a role and the provider has credentials, assume it using the provider credentials + if account_has_role and provider_has_credentials: + assert account.role_name + assert account.role_session_name + return await assume_role( + account.account_number, + account.role_name, + account.role_session_name, + access_key=provider_settings.access_key, + secret_key=provider_settings.secret_key, + region=region, + ) + + # If the provider has a role and credentials, assume it using the provider credentials + if provider_has_role and provider_has_credentials: + assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + account.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + access_key=provider_settings.access_key, + secret_key=provider_settings.secret_key, + region=region, + ) + + # If neither the provider nor the account have credentials, but the provider has a role, assume it using local credentials + if provider_has_role: + assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + account.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + region=region, + ) + + # If neither the provider nor the account have credentials, but the account has a role, assume it using local credentials + if account_has_role: + assert account.role_name + assert account.role_session_name + return await assume_role( + account.account_number, + account.role_name, + account.role_session_name, + region=region, + ) + + # If neither the provider nor the account have credentials or roles, use local credentials + return {} + + # If the provider has a role and credentials, assume it using the provider credentials + if provider_has_role and provider_has_credentials: + assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + provider_settings.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + access_key=provider_settings.access_key, + secret_key=provider_settings.secret_key, + region=region, + ) + + # If the provider has a role, but no credentials, assume it using local credentials + if provider_has_role: + 
assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + provider_settings.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + region=region, + ) + + credentials: AwsCredentials = {} + + # If there is a region specified, use it + if region: + credentials["region_name"] = region + + # If the provider has credentials, but no role, use them + if provider_has_credentials: + assert provider_settings.access_key + assert provider_settings.secret_key + credentials = { + "aws_access_key_id": provider_settings.access_key, + "aws_secret_access_key": provider_settings.secret_key, + } + + # If the provider has neither credentials nor a role, use local credentials + return credentials + + +async def assume_role( + account_number: str, + role_name: str, + role_session_name: str, + access_key: Optional[str] = None, + secret_key: Optional[str] = None, + region: Optional[str] = None, +) -> AwsCredentials: + """Assume an AWS role. + + Args: + account_number (str): The account number. + role_name (str): The role name. + role_session_name (str): The role session name. + access_key (str, optional): The access key. Defaults to None. + secret_key (str, optional): The secret key. Defaults to None. + region (str, optional): The region. Defaults to None. + + Returns: + AwsCredentials: The AWS credentials. + """ + session = get_session() + + # Format the role arn + role_arn = f"arn:aws:iam::{account_number}:role/{role_name}" + + # kwargs for the sts client + client_kwargs: StsClientKwargs = {} + + # Add the access key and secret key if they were provided + if access_key and secret_key: + client_kwargs["aws_access_key_id"] = access_key + client_kwargs["aws_secret_access_key"] = secret_key + + # Add the region if it was provided + if region: + client_kwargs["region_name"] = region + + # Create the sts client + async with session.create_client("sts", **client_kwargs) as client: + client: STSClient # type: ignore[no-redef] + # Assume the role + response = await client.assume_role( + RoleArn=role_arn, + RoleSessionName=role_session_name, + ) + + assumed_credentials = response["Credentials"] + + credentials: AwsCredentials = { + "aws_access_key_id": assumed_credentials["AccessKeyId"], + "aws_secret_access_key": assumed_credentials["SecretAccessKey"], + "aws_session_token": assumed_credentials["SessionToken"], + } + + # Add the region if it was provided + if region: + credentials["region_name"] = region + + return credentials diff --git a/src/censys/cloud_connectors/aws_connector/settings.py b/src/censys/cloud_connectors/aws_connector/settings.py index 5240df9..9058f7b 100644 --- a/src/censys/cloud_connectors/aws_connector/settings.py +++ b/src/censys/cloud_connectors/aws_connector/settings.py @@ -37,7 +37,9 @@ class AwsSpecificSettings(ProviderSpecificSettings): secret_key: Optional[str] = Field(min_length=1) role_name: Optional[str] = Field(min_length=1) role_session_name: Optional[str] = Field(min_length=1) - ignore_tags: Optional[list[str]] = Field(min_length=1) + ignore_tags: list[str] = Field( + description="Tags to ignore", default=["censys-cloud-connector-ignore"] + ) session_token: Optional[str] = Field(min_length=1) external_id: Optional[str] = Field(min_length=1) diff --git a/src/censys/cloud_connectors/common/cli/commands/scan.py b/src/censys/cloud_connectors/common/cli/commands/scan.py index 6ebf1dd..5c5d3e0 100644 --- a/src/censys/cloud_connectors/common/cli/commands/scan.py +++ 
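The new credentials module is meant to be awaited once per account and region, with the resulting dict reused for every client in that scan. A usage sketch against the assume_role helper added above; the account number, role, session name, and region are placeholders:

import asyncio

from aiobotocore.session import get_session

from censys.cloud_connectors.aws_connector.credentials import assume_role


async def main() -> None:
    # Placeholders only: substitute a real account number, role, and session name.
    credentials = await assume_role(
        account_number="123456789012",
        role_name="CensysCloudConnectorRole",
        role_session_name="censys-cloud-connector",
        region="us-east-1",
    )
    async with get_session().create_client("ec2", **credentials) as ec2:
        interfaces = await ec2.describe_network_interfaces()
        print(len(interfaces.get("NetworkInterfaces", [])))


if __name__ == "__main__":
    asyncio.run(main())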
b/src/censys/cloud_connectors/common/cli/commands/scan.py @@ -22,7 +22,6 @@ async def cli_scan(args: argparse.Namespace): logger = get_logger(log_name="censys_cloud_connectors", level="INFO") logger.info("Censys Cloud Connectors Version: %s", __version__) - logger.info("Async scan started") try: settings = Settings(_env_file=".env") # type: ignore diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index 357ec52..997d900 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -104,7 +104,9 @@ async def scan_all(self): """Scan all Gcp Organizations.""" provider_settings: dict[ tuple, GcpSpecificSettings - ] = self.settings.providers.get(self.provider, {}) + ] = self.settings.providers.get( + self.provider, {} + ) # type: ignore for provider_setting in provider_settings.values(): await self.scan(provider_setting) diff --git a/templates/aws/stackset_role_deploy.json b/templates/aws/stackset_role_deploy.json index 0e003ab..0299112 100644 --- a/templates/aws/stackset_role_deploy.json +++ b/templates/aws/stackset_role_deploy.json @@ -42,14 +42,28 @@ "ManagedPolicyArns": ["arn:aws:iam::aws:policy/SecurityAudit"], "Policies": [ { - "PolicyName": "CensysAPIGatewayPolicy", + "PolicyName": "CensysCloudConnectorPolicy", "PolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Sid": "CensysCloudConnectorPolicy", "Effect": "Allow", - "Action": ["apigateway:GET"], + "Action": [ + "apigateway:GET", + "ec2:DescribeTags", + "ec2:DescribeNetworkInterfaces", + "ecs:ListContainerInstances", + "ecs:ListClusters", + "elasticloadbalancing:DescribeLoadBalancers", + "rds:DescribeDBInstances", + "route53:ListHostedZones", + "route53:ListResourceRecordSets", + "route53domains:ListDomains", + "s3:GetBucketLocation", + "s3:ListAllMyBuckets", + "s3:ListBucket" + ], "Resource": "*" } ] From 76d32c188c73b4fb104935961a397ea7a4f57c08 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 20 Jun 2023 12:49:20 -0400 Subject: [PATCH 04/19] fix(ci): remove additional curly brace --- .github/actions/container-build-buildah/action.yaml | 2 +- .github/workflows/pull-request.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/container-build-buildah/action.yaml b/.github/actions/container-build-buildah/action.yaml index d05ae5c..8cabd9d 100644 --- a/.github/actions/container-build-buildah/action.yaml +++ b/.github/actions/container-build-buildah/action.yaml @@ -61,7 +61,7 @@ runs: run: | PR_NUMBER=pr-$(echo $GITHUB_REF | cut -d '/' -f3) buildah tag ${{ inputs.image }}:${GITHUB_SHA} ${{ inputs.image }}:${PR_NUMBER} - buildah push ${{ inputs.image }}:${PR_NUMBER}} + buildah push ${{ inputs.image }}:${PR_NUMBER} - name: Tag Image with Version shell: bash diff --git a/.github/workflows/pull-request.yaml b/.github/workflows/pull-request.yaml index 685b728..f28c868 100644 --- a/.github/workflows/pull-request.yaml +++ b/.github/workflows/pull-request.yaml @@ -16,6 +16,6 @@ jobs: test: uses: ./.github/workflows/reusable-test.yaml - + lint: uses: ./.github/workflows/reusable-lint.yaml From 9ca194e79c455df19648961de0ef4735630a60cf Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 20 Jun 2023 13:39:57 -0400 Subject: [PATCH 05/19] chore: remove boto3_stubs references --- poetry.lock | 1063 ++++++++--------- pyproject.toml | 3 +- .../aws_connector/connector.py | 110 +- .../azure_connector/connector.py | 3 +- .../cloud_connectors/common/connector.py 
| 3 +- .../common/plugins/registry.py | 6 +- .../cloud_connectors/plugins/aws_tags.py | 62 +- tests/test_aws_connector.py | 2 +- 8 files changed, 608 insertions(+), 644 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6567c93..228a994 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2214,382 +2214,6 @@ s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] -[[package]] -name = "boto3-stubs" -version = "1.26.130" -description = "Type annotations for boto3 1.26.130 generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "boto3-stubs-1.26.130.tar.gz", hash = "sha256:6718d6acf3eb3ba3cd6d581f3e1f5df0ae7c4d66967d7f3957f02baff11a2966"}, - {file = "boto3_stubs-1.26.130-py3-none-any.whl", hash = "sha256:08502537de66e5e06bcdc3c5329e0f82b5cd676ec2b992234044ab2cb7251ee9"}, -] - -[package.dependencies] -botocore-stubs = "*" -mypy-boto3-apigateway = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"apigateway\""} -mypy-boto3-apigatewayv2 = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"apigatewayv2\""} -mypy-boto3-ec2 = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"ec2\""} -mypy-boto3-ecs = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"ecs\""} -mypy-boto3-elb = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"elb\""} -mypy-boto3-elbv2 = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"elbv2\""} -mypy-boto3-rds = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"rds\""} -mypy-boto3-route53 = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"route53\""} -mypy-boto3-route53domains = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"route53domains\""} -mypy-boto3-s3 = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"s3\""} -mypy-boto3-sts = {version = ">=1.26.0,<1.27.0", optional = true, markers = "extra == \"sts\""} -types-s3transfer = "*" - -[package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.26.0,<1.27.0)"] -account = ["mypy-boto3-account (>=1.26.0,<1.27.0)"] -acm = ["mypy-boto3-acm (>=1.26.0,<1.27.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.26.0,<1.27.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.26.0,<1.27.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.26.0,<1.27.0)", "mypy-boto3-account (>=1.26.0,<1.27.0)", "mypy-boto3-acm (>=1.26.0,<1.27.0)", "mypy-boto3-acm-pca (>=1.26.0,<1.27.0)", "mypy-boto3-alexaforbusiness (>=1.26.0,<1.27.0)", "mypy-boto3-amp (>=1.26.0,<1.27.0)", "mypy-boto3-amplify (>=1.26.0,<1.27.0)", "mypy-boto3-amplifybackend (>=1.26.0,<1.27.0)", "mypy-boto3-amplifyuibuilder (>=1.26.0,<1.27.0)", "mypy-boto3-apigateway (>=1.26.0,<1.27.0)", "mypy-boto3-apigatewaymanagementapi (>=1.26.0,<1.27.0)", "mypy-boto3-apigatewayv2 (>=1.26.0,<1.27.0)", "mypy-boto3-appconfig (>=1.26.0,<1.27.0)", "mypy-boto3-appconfigdata (>=1.26.0,<1.27.0)", "mypy-boto3-appflow (>=1.26.0,<1.27.0)", "mypy-boto3-appintegrations (>=1.26.0,<1.27.0)", "mypy-boto3-application-autoscaling (>=1.26.0,<1.27.0)", "mypy-boto3-application-insights (>=1.26.0,<1.27.0)", "mypy-boto3-applicationcostprofiler (>=1.26.0,<1.27.0)", "mypy-boto3-appmesh (>=1.26.0,<1.27.0)", "mypy-boto3-apprunner (>=1.26.0,<1.27.0)", "mypy-boto3-appstream (>=1.26.0,<1.27.0)", "mypy-boto3-appsync (>=1.26.0,<1.27.0)", "mypy-boto3-arc-zonal-shift (>=1.26.0,<1.27.0)", "mypy-boto3-athena (>=1.26.0,<1.27.0)", 
"mypy-boto3-auditmanager (>=1.26.0,<1.27.0)", "mypy-boto3-autoscaling (>=1.26.0,<1.27.0)", "mypy-boto3-autoscaling-plans (>=1.26.0,<1.27.0)", "mypy-boto3-backup (>=1.26.0,<1.27.0)", "mypy-boto3-backup-gateway (>=1.26.0,<1.27.0)", "mypy-boto3-backupstorage (>=1.26.0,<1.27.0)", "mypy-boto3-batch (>=1.26.0,<1.27.0)", "mypy-boto3-billingconductor (>=1.26.0,<1.27.0)", "mypy-boto3-braket (>=1.26.0,<1.27.0)", "mypy-boto3-budgets (>=1.26.0,<1.27.0)", "mypy-boto3-ce (>=1.26.0,<1.27.0)", "mypy-boto3-chime (>=1.26.0,<1.27.0)", "mypy-boto3-chime-sdk-identity (>=1.26.0,<1.27.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.26.0,<1.27.0)", "mypy-boto3-chime-sdk-meetings (>=1.26.0,<1.27.0)", "mypy-boto3-chime-sdk-messaging (>=1.26.0,<1.27.0)", "mypy-boto3-chime-sdk-voice (>=1.26.0,<1.27.0)", "mypy-boto3-cleanrooms (>=1.26.0,<1.27.0)", "mypy-boto3-cloud9 (>=1.26.0,<1.27.0)", "mypy-boto3-cloudcontrol (>=1.26.0,<1.27.0)", "mypy-boto3-clouddirectory (>=1.26.0,<1.27.0)", "mypy-boto3-cloudformation (>=1.26.0,<1.27.0)", "mypy-boto3-cloudfront (>=1.26.0,<1.27.0)", "mypy-boto3-cloudhsm (>=1.26.0,<1.27.0)", "mypy-boto3-cloudhsmv2 (>=1.26.0,<1.27.0)", "mypy-boto3-cloudsearch (>=1.26.0,<1.27.0)", "mypy-boto3-cloudsearchdomain (>=1.26.0,<1.27.0)", "mypy-boto3-cloudtrail (>=1.26.0,<1.27.0)", "mypy-boto3-cloudtrail-data (>=1.26.0,<1.27.0)", "mypy-boto3-cloudwatch (>=1.26.0,<1.27.0)", "mypy-boto3-codeartifact (>=1.26.0,<1.27.0)", "mypy-boto3-codebuild (>=1.26.0,<1.27.0)", "mypy-boto3-codecatalyst (>=1.26.0,<1.27.0)", "mypy-boto3-codecommit (>=1.26.0,<1.27.0)", "mypy-boto3-codedeploy (>=1.26.0,<1.27.0)", "mypy-boto3-codeguru-reviewer (>=1.26.0,<1.27.0)", "mypy-boto3-codeguruprofiler (>=1.26.0,<1.27.0)", "mypy-boto3-codepipeline (>=1.26.0,<1.27.0)", "mypy-boto3-codestar (>=1.26.0,<1.27.0)", "mypy-boto3-codestar-connections (>=1.26.0,<1.27.0)", "mypy-boto3-codestar-notifications (>=1.26.0,<1.27.0)", "mypy-boto3-cognito-identity (>=1.26.0,<1.27.0)", "mypy-boto3-cognito-idp (>=1.26.0,<1.27.0)", "mypy-boto3-cognito-sync (>=1.26.0,<1.27.0)", "mypy-boto3-comprehend (>=1.26.0,<1.27.0)", "mypy-boto3-comprehendmedical (>=1.26.0,<1.27.0)", "mypy-boto3-compute-optimizer (>=1.26.0,<1.27.0)", "mypy-boto3-config (>=1.26.0,<1.27.0)", "mypy-boto3-connect (>=1.26.0,<1.27.0)", "mypy-boto3-connect-contact-lens (>=1.26.0,<1.27.0)", "mypy-boto3-connectcampaigns (>=1.26.0,<1.27.0)", "mypy-boto3-connectcases (>=1.26.0,<1.27.0)", "mypy-boto3-connectparticipant (>=1.26.0,<1.27.0)", "mypy-boto3-controltower (>=1.26.0,<1.27.0)", "mypy-boto3-cur (>=1.26.0,<1.27.0)", "mypy-boto3-customer-profiles (>=1.26.0,<1.27.0)", "mypy-boto3-databrew (>=1.26.0,<1.27.0)", "mypy-boto3-dataexchange (>=1.26.0,<1.27.0)", "mypy-boto3-datapipeline (>=1.26.0,<1.27.0)", "mypy-boto3-datasync (>=1.26.0,<1.27.0)", "mypy-boto3-dax (>=1.26.0,<1.27.0)", "mypy-boto3-detective (>=1.26.0,<1.27.0)", "mypy-boto3-devicefarm (>=1.26.0,<1.27.0)", "mypy-boto3-devops-guru (>=1.26.0,<1.27.0)", "mypy-boto3-directconnect (>=1.26.0,<1.27.0)", "mypy-boto3-discovery (>=1.26.0,<1.27.0)", "mypy-boto3-dlm (>=1.26.0,<1.27.0)", "mypy-boto3-dms (>=1.26.0,<1.27.0)", "mypy-boto3-docdb (>=1.26.0,<1.27.0)", "mypy-boto3-docdb-elastic (>=1.26.0,<1.27.0)", "mypy-boto3-drs (>=1.26.0,<1.27.0)", "mypy-boto3-ds (>=1.26.0,<1.27.0)", "mypy-boto3-dynamodb (>=1.26.0,<1.27.0)", "mypy-boto3-dynamodbstreams (>=1.26.0,<1.27.0)", "mypy-boto3-ebs (>=1.26.0,<1.27.0)", "mypy-boto3-ec2 (>=1.26.0,<1.27.0)", "mypy-boto3-ec2-instance-connect (>=1.26.0,<1.27.0)", "mypy-boto3-ecr (>=1.26.0,<1.27.0)", "mypy-boto3-ecr-public 
(>=1.26.0,<1.27.0)", "mypy-boto3-ecs (>=1.26.0,<1.27.0)", "mypy-boto3-efs (>=1.26.0,<1.27.0)", "mypy-boto3-eks (>=1.26.0,<1.27.0)", "mypy-boto3-elastic-inference (>=1.26.0,<1.27.0)", "mypy-boto3-elasticache (>=1.26.0,<1.27.0)", "mypy-boto3-elasticbeanstalk (>=1.26.0,<1.27.0)", "mypy-boto3-elastictranscoder (>=1.26.0,<1.27.0)", "mypy-boto3-elb (>=1.26.0,<1.27.0)", "mypy-boto3-elbv2 (>=1.26.0,<1.27.0)", "mypy-boto3-emr (>=1.26.0,<1.27.0)", "mypy-boto3-emr-containers (>=1.26.0,<1.27.0)", "mypy-boto3-emr-serverless (>=1.26.0,<1.27.0)", "mypy-boto3-es (>=1.26.0,<1.27.0)", "mypy-boto3-events (>=1.26.0,<1.27.0)", "mypy-boto3-evidently (>=1.26.0,<1.27.0)", "mypy-boto3-finspace (>=1.26.0,<1.27.0)", "mypy-boto3-finspace-data (>=1.26.0,<1.27.0)", "mypy-boto3-firehose (>=1.26.0,<1.27.0)", "mypy-boto3-fis (>=1.26.0,<1.27.0)", "mypy-boto3-fms (>=1.26.0,<1.27.0)", "mypy-boto3-forecast (>=1.26.0,<1.27.0)", "mypy-boto3-forecastquery (>=1.26.0,<1.27.0)", "mypy-boto3-frauddetector (>=1.26.0,<1.27.0)", "mypy-boto3-fsx (>=1.26.0,<1.27.0)", "mypy-boto3-gamelift (>=1.26.0,<1.27.0)", "mypy-boto3-gamesparks (>=1.26.0,<1.27.0)", "mypy-boto3-glacier (>=1.26.0,<1.27.0)", "mypy-boto3-globalaccelerator (>=1.26.0,<1.27.0)", "mypy-boto3-glue (>=1.26.0,<1.27.0)", "mypy-boto3-grafana (>=1.26.0,<1.27.0)", "mypy-boto3-greengrass (>=1.26.0,<1.27.0)", "mypy-boto3-greengrassv2 (>=1.26.0,<1.27.0)", "mypy-boto3-groundstation (>=1.26.0,<1.27.0)", "mypy-boto3-guardduty (>=1.26.0,<1.27.0)", "mypy-boto3-health (>=1.26.0,<1.27.0)", "mypy-boto3-healthlake (>=1.26.0,<1.27.0)", "mypy-boto3-honeycode (>=1.26.0,<1.27.0)", "mypy-boto3-iam (>=1.26.0,<1.27.0)", "mypy-boto3-identitystore (>=1.26.0,<1.27.0)", "mypy-boto3-imagebuilder (>=1.26.0,<1.27.0)", "mypy-boto3-importexport (>=1.26.0,<1.27.0)", "mypy-boto3-inspector (>=1.26.0,<1.27.0)", "mypy-boto3-inspector2 (>=1.26.0,<1.27.0)", "mypy-boto3-internetmonitor (>=1.26.0,<1.27.0)", "mypy-boto3-iot (>=1.26.0,<1.27.0)", "mypy-boto3-iot-data (>=1.26.0,<1.27.0)", "mypy-boto3-iot-jobs-data (>=1.26.0,<1.27.0)", "mypy-boto3-iot-roborunner (>=1.26.0,<1.27.0)", "mypy-boto3-iot1click-devices (>=1.26.0,<1.27.0)", "mypy-boto3-iot1click-projects (>=1.26.0,<1.27.0)", "mypy-boto3-iotanalytics (>=1.26.0,<1.27.0)", "mypy-boto3-iotdeviceadvisor (>=1.26.0,<1.27.0)", "mypy-boto3-iotevents (>=1.26.0,<1.27.0)", "mypy-boto3-iotevents-data (>=1.26.0,<1.27.0)", "mypy-boto3-iotfleethub (>=1.26.0,<1.27.0)", "mypy-boto3-iotfleetwise (>=1.26.0,<1.27.0)", "mypy-boto3-iotsecuretunneling (>=1.26.0,<1.27.0)", "mypy-boto3-iotsitewise (>=1.26.0,<1.27.0)", "mypy-boto3-iotthingsgraph (>=1.26.0,<1.27.0)", "mypy-boto3-iottwinmaker (>=1.26.0,<1.27.0)", "mypy-boto3-iotwireless (>=1.26.0,<1.27.0)", "mypy-boto3-ivs (>=1.26.0,<1.27.0)", "mypy-boto3-ivs-realtime (>=1.26.0,<1.27.0)", "mypy-boto3-ivschat (>=1.26.0,<1.27.0)", "mypy-boto3-kafka (>=1.26.0,<1.27.0)", "mypy-boto3-kafkaconnect (>=1.26.0,<1.27.0)", "mypy-boto3-kendra (>=1.26.0,<1.27.0)", "mypy-boto3-kendra-ranking (>=1.26.0,<1.27.0)", "mypy-boto3-keyspaces (>=1.26.0,<1.27.0)", "mypy-boto3-kinesis (>=1.26.0,<1.27.0)", "mypy-boto3-kinesis-video-archived-media (>=1.26.0,<1.27.0)", "mypy-boto3-kinesis-video-media (>=1.26.0,<1.27.0)", "mypy-boto3-kinesis-video-signaling (>=1.26.0,<1.27.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.26.0,<1.27.0)", "mypy-boto3-kinesisanalytics (>=1.26.0,<1.27.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.26.0,<1.27.0)", "mypy-boto3-kinesisvideo (>=1.26.0,<1.27.0)", "mypy-boto3-kms (>=1.26.0,<1.27.0)", "mypy-boto3-lakeformation (>=1.26.0,<1.27.0)", 
"mypy-boto3-lambda (>=1.26.0,<1.27.0)", "mypy-boto3-lex-models (>=1.26.0,<1.27.0)", "mypy-boto3-lex-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-lexv2-models (>=1.26.0,<1.27.0)", "mypy-boto3-lexv2-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-license-manager (>=1.26.0,<1.27.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.26.0,<1.27.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.26.0,<1.27.0)", "mypy-boto3-lightsail (>=1.26.0,<1.27.0)", "mypy-boto3-location (>=1.26.0,<1.27.0)", "mypy-boto3-logs (>=1.26.0,<1.27.0)", "mypy-boto3-lookoutequipment (>=1.26.0,<1.27.0)", "mypy-boto3-lookoutmetrics (>=1.26.0,<1.27.0)", "mypy-boto3-lookoutvision (>=1.26.0,<1.27.0)", "mypy-boto3-m2 (>=1.26.0,<1.27.0)", "mypy-boto3-machinelearning (>=1.26.0,<1.27.0)", "mypy-boto3-macie (>=1.26.0,<1.27.0)", "mypy-boto3-macie2 (>=1.26.0,<1.27.0)", "mypy-boto3-managedblockchain (>=1.26.0,<1.27.0)", "mypy-boto3-marketplace-catalog (>=1.26.0,<1.27.0)", "mypy-boto3-marketplace-entitlement (>=1.26.0,<1.27.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.26.0,<1.27.0)", "mypy-boto3-mediaconnect (>=1.26.0,<1.27.0)", "mypy-boto3-mediaconvert (>=1.26.0,<1.27.0)", "mypy-boto3-medialive (>=1.26.0,<1.27.0)", "mypy-boto3-mediapackage (>=1.26.0,<1.27.0)", "mypy-boto3-mediapackage-vod (>=1.26.0,<1.27.0)", "mypy-boto3-mediastore (>=1.26.0,<1.27.0)", "mypy-boto3-mediastore-data (>=1.26.0,<1.27.0)", "mypy-boto3-mediatailor (>=1.26.0,<1.27.0)", "mypy-boto3-memorydb (>=1.26.0,<1.27.0)", "mypy-boto3-meteringmarketplace (>=1.26.0,<1.27.0)", "mypy-boto3-mgh (>=1.26.0,<1.27.0)", "mypy-boto3-mgn (>=1.26.0,<1.27.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.26.0,<1.27.0)", "mypy-boto3-migrationhub-config (>=1.26.0,<1.27.0)", "mypy-boto3-migrationhuborchestrator (>=1.26.0,<1.27.0)", "mypy-boto3-migrationhubstrategy (>=1.26.0,<1.27.0)", "mypy-boto3-mobile (>=1.26.0,<1.27.0)", "mypy-boto3-mq (>=1.26.0,<1.27.0)", "mypy-boto3-mturk (>=1.26.0,<1.27.0)", "mypy-boto3-mwaa (>=1.26.0,<1.27.0)", "mypy-boto3-neptune (>=1.26.0,<1.27.0)", "mypy-boto3-network-firewall (>=1.26.0,<1.27.0)", "mypy-boto3-networkmanager (>=1.26.0,<1.27.0)", "mypy-boto3-nimble (>=1.26.0,<1.27.0)", "mypy-boto3-oam (>=1.26.0,<1.27.0)", "mypy-boto3-omics (>=1.26.0,<1.27.0)", "mypy-boto3-opensearch (>=1.26.0,<1.27.0)", "mypy-boto3-opensearchserverless (>=1.26.0,<1.27.0)", "mypy-boto3-opsworks (>=1.26.0,<1.27.0)", "mypy-boto3-opsworkscm (>=1.26.0,<1.27.0)", "mypy-boto3-organizations (>=1.26.0,<1.27.0)", "mypy-boto3-osis (>=1.26.0,<1.27.0)", "mypy-boto3-outposts (>=1.26.0,<1.27.0)", "mypy-boto3-panorama (>=1.26.0,<1.27.0)", "mypy-boto3-personalize (>=1.26.0,<1.27.0)", "mypy-boto3-personalize-events (>=1.26.0,<1.27.0)", "mypy-boto3-personalize-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-pi (>=1.26.0,<1.27.0)", "mypy-boto3-pinpoint (>=1.26.0,<1.27.0)", "mypy-boto3-pinpoint-email (>=1.26.0,<1.27.0)", "mypy-boto3-pinpoint-sms-voice (>=1.26.0,<1.27.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.26.0,<1.27.0)", "mypy-boto3-pipes (>=1.26.0,<1.27.0)", "mypy-boto3-polly (>=1.26.0,<1.27.0)", "mypy-boto3-pricing (>=1.26.0,<1.27.0)", "mypy-boto3-privatenetworks (>=1.26.0,<1.27.0)", "mypy-boto3-proton (>=1.26.0,<1.27.0)", "mypy-boto3-qldb (>=1.26.0,<1.27.0)", "mypy-boto3-qldb-session (>=1.26.0,<1.27.0)", "mypy-boto3-quicksight (>=1.26.0,<1.27.0)", "mypy-boto3-ram (>=1.26.0,<1.27.0)", "mypy-boto3-rbin (>=1.26.0,<1.27.0)", "mypy-boto3-rds (>=1.26.0,<1.27.0)", "mypy-boto3-rds-data (>=1.26.0,<1.27.0)", "mypy-boto3-redshift (>=1.26.0,<1.27.0)", "mypy-boto3-redshift-data (>=1.26.0,<1.27.0)", 
"mypy-boto3-redshift-serverless (>=1.26.0,<1.27.0)", "mypy-boto3-rekognition (>=1.26.0,<1.27.0)", "mypy-boto3-resiliencehub (>=1.26.0,<1.27.0)", "mypy-boto3-resource-explorer-2 (>=1.26.0,<1.27.0)", "mypy-boto3-resource-groups (>=1.26.0,<1.27.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.26.0,<1.27.0)", "mypy-boto3-robomaker (>=1.26.0,<1.27.0)", "mypy-boto3-rolesanywhere (>=1.26.0,<1.27.0)", "mypy-boto3-route53 (>=1.26.0,<1.27.0)", "mypy-boto3-route53-recovery-cluster (>=1.26.0,<1.27.0)", "mypy-boto3-route53-recovery-control-config (>=1.26.0,<1.27.0)", "mypy-boto3-route53-recovery-readiness (>=1.26.0,<1.27.0)", "mypy-boto3-route53domains (>=1.26.0,<1.27.0)", "mypy-boto3-route53resolver (>=1.26.0,<1.27.0)", "mypy-boto3-rum (>=1.26.0,<1.27.0)", "mypy-boto3-s3 (>=1.26.0,<1.27.0)", "mypy-boto3-s3control (>=1.26.0,<1.27.0)", "mypy-boto3-s3outposts (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-edge (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-geospatial (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-metrics (>=1.26.0,<1.27.0)", "mypy-boto3-sagemaker-runtime (>=1.26.0,<1.27.0)", "mypy-boto3-savingsplans (>=1.26.0,<1.27.0)", "mypy-boto3-scheduler (>=1.26.0,<1.27.0)", "mypy-boto3-schemas (>=1.26.0,<1.27.0)", "mypy-boto3-sdb (>=1.26.0,<1.27.0)", "mypy-boto3-secretsmanager (>=1.26.0,<1.27.0)", "mypy-boto3-securityhub (>=1.26.0,<1.27.0)", "mypy-boto3-securitylake (>=1.26.0,<1.27.0)", "mypy-boto3-serverlessrepo (>=1.26.0,<1.27.0)", "mypy-boto3-service-quotas (>=1.26.0,<1.27.0)", "mypy-boto3-servicecatalog (>=1.26.0,<1.27.0)", "mypy-boto3-servicecatalog-appregistry (>=1.26.0,<1.27.0)", "mypy-boto3-servicediscovery (>=1.26.0,<1.27.0)", "mypy-boto3-ses (>=1.26.0,<1.27.0)", "mypy-boto3-sesv2 (>=1.26.0,<1.27.0)", "mypy-boto3-shield (>=1.26.0,<1.27.0)", "mypy-boto3-signer (>=1.26.0,<1.27.0)", "mypy-boto3-simspaceweaver (>=1.26.0,<1.27.0)", "mypy-boto3-sms (>=1.26.0,<1.27.0)", "mypy-boto3-sms-voice (>=1.26.0,<1.27.0)", "mypy-boto3-snow-device-management (>=1.26.0,<1.27.0)", "mypy-boto3-snowball (>=1.26.0,<1.27.0)", "mypy-boto3-sns (>=1.26.0,<1.27.0)", "mypy-boto3-sqs (>=1.26.0,<1.27.0)", "mypy-boto3-ssm (>=1.26.0,<1.27.0)", "mypy-boto3-ssm-contacts (>=1.26.0,<1.27.0)", "mypy-boto3-ssm-incidents (>=1.26.0,<1.27.0)", "mypy-boto3-ssm-sap (>=1.26.0,<1.27.0)", "mypy-boto3-sso (>=1.26.0,<1.27.0)", "mypy-boto3-sso-admin (>=1.26.0,<1.27.0)", "mypy-boto3-sso-oidc (>=1.26.0,<1.27.0)", "mypy-boto3-stepfunctions (>=1.26.0,<1.27.0)", "mypy-boto3-storagegateway (>=1.26.0,<1.27.0)", "mypy-boto3-sts (>=1.26.0,<1.27.0)", "mypy-boto3-support (>=1.26.0,<1.27.0)", "mypy-boto3-support-app (>=1.26.0,<1.27.0)", "mypy-boto3-swf (>=1.26.0,<1.27.0)", "mypy-boto3-synthetics (>=1.26.0,<1.27.0)", "mypy-boto3-textract (>=1.26.0,<1.27.0)", "mypy-boto3-timestream-query (>=1.26.0,<1.27.0)", "mypy-boto3-timestream-write (>=1.26.0,<1.27.0)", "mypy-boto3-tnb (>=1.26.0,<1.27.0)", "mypy-boto3-transcribe (>=1.26.0,<1.27.0)", "mypy-boto3-transfer (>=1.26.0,<1.27.0)", "mypy-boto3-translate (>=1.26.0,<1.27.0)", "mypy-boto3-voice-id (>=1.26.0,<1.27.0)", "mypy-boto3-vpc-lattice (>=1.26.0,<1.27.0)", "mypy-boto3-waf (>=1.26.0,<1.27.0)", "mypy-boto3-waf-regional (>=1.26.0,<1.27.0)", "mypy-boto3-wafv2 (>=1.26.0,<1.27.0)", "mypy-boto3-wellarchitected (>=1.26.0,<1.27.0)", "mypy-boto3-wisdom (>=1.26.0,<1.27.0)", "mypy-boto3-workdocs (>=1.26.0,<1.27.0)", "mypy-boto3-worklink (>=1.26.0,<1.27.0)", 
"mypy-boto3-workmail (>=1.26.0,<1.27.0)", "mypy-boto3-workmailmessageflow (>=1.26.0,<1.27.0)", "mypy-boto3-workspaces (>=1.26.0,<1.27.0)", "mypy-boto3-workspaces-web (>=1.26.0,<1.27.0)", "mypy-boto3-xray (>=1.26.0,<1.27.0)"] -amp = ["mypy-boto3-amp (>=1.26.0,<1.27.0)"] -amplify = ["mypy-boto3-amplify (>=1.26.0,<1.27.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.26.0,<1.27.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.26.0,<1.27.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.26.0,<1.27.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.26.0,<1.27.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.26.0,<1.27.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.26.0,<1.27.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.26.0,<1.27.0)"] -appflow = ["mypy-boto3-appflow (>=1.26.0,<1.27.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.26.0,<1.27.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.26.0,<1.27.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.26.0,<1.27.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.26.0,<1.27.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.26.0,<1.27.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.26.0,<1.27.0)"] -appstream = ["mypy-boto3-appstream (>=1.26.0,<1.27.0)"] -appsync = ["mypy-boto3-appsync (>=1.26.0,<1.27.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.26.0,<1.27.0)"] -athena = ["mypy-boto3-athena (>=1.26.0,<1.27.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.26.0,<1.27.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.26.0,<1.27.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.26.0,<1.27.0)"] -backup = ["mypy-boto3-backup (>=1.26.0,<1.27.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.26.0,<1.27.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.26.0,<1.27.0)"] -batch = ["mypy-boto3-batch (>=1.26.0,<1.27.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.26.0,<1.27.0)"] -boto3 = ["boto3 (==1.26.130)", "botocore (==1.29.130)"] -braket = ["mypy-boto3-braket (>=1.26.0,<1.27.0)"] -budgets = ["mypy-boto3-budgets (>=1.26.0,<1.27.0)"] -ce = ["mypy-boto3-ce (>=1.26.0,<1.27.0)"] -chime = ["mypy-boto3-chime (>=1.26.0,<1.27.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.26.0,<1.27.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.26.0,<1.27.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.26.0,<1.27.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.26.0,<1.27.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.26.0,<1.27.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.26.0,<1.27.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.26.0,<1.27.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.26.0,<1.27.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.26.0,<1.27.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.26.0,<1.27.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.26.0,<1.27.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.26.0,<1.27.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.26.0,<1.27.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.26.0,<1.27.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.26.0,<1.27.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.26.0,<1.27.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.26.0,<1.27.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.26.0,<1.27.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.26.0,<1.27.0)"] -codebuild = ["mypy-boto3-codebuild 
(>=1.26.0,<1.27.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.26.0,<1.27.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.26.0,<1.27.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.26.0,<1.27.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.26.0,<1.27.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.26.0,<1.27.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.26.0,<1.27.0)"] -codestar = ["mypy-boto3-codestar (>=1.26.0,<1.27.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.26.0,<1.27.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.26.0,<1.27.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.26.0,<1.27.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.26.0,<1.27.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.26.0,<1.27.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.26.0,<1.27.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.26.0,<1.27.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.26.0,<1.27.0)"] -config = ["mypy-boto3-config (>=1.26.0,<1.27.0)"] -connect = ["mypy-boto3-connect (>=1.26.0,<1.27.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.26.0,<1.27.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.26.0,<1.27.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.26.0,<1.27.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.26.0,<1.27.0)"] -controltower = ["mypy-boto3-controltower (>=1.26.0,<1.27.0)"] -cur = ["mypy-boto3-cur (>=1.26.0,<1.27.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.26.0,<1.27.0)"] -databrew = ["mypy-boto3-databrew (>=1.26.0,<1.27.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.26.0,<1.27.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.26.0,<1.27.0)"] -datasync = ["mypy-boto3-datasync (>=1.26.0,<1.27.0)"] -dax = ["mypy-boto3-dax (>=1.26.0,<1.27.0)"] -detective = ["mypy-boto3-detective (>=1.26.0,<1.27.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.26.0,<1.27.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.26.0,<1.27.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.26.0,<1.27.0)"] -discovery = ["mypy-boto3-discovery (>=1.26.0,<1.27.0)"] -dlm = ["mypy-boto3-dlm (>=1.26.0,<1.27.0)"] -dms = ["mypy-boto3-dms (>=1.26.0,<1.27.0)"] -docdb = ["mypy-boto3-docdb (>=1.26.0,<1.27.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.26.0,<1.27.0)"] -drs = ["mypy-boto3-drs (>=1.26.0,<1.27.0)"] -ds = ["mypy-boto3-ds (>=1.26.0,<1.27.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.26.0,<1.27.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.26.0,<1.27.0)"] -ebs = ["mypy-boto3-ebs (>=1.26.0,<1.27.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.26.0,<1.27.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.26.0,<1.27.0)"] -ecr = ["mypy-boto3-ecr (>=1.26.0,<1.27.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.26.0,<1.27.0)"] -ecs = ["mypy-boto3-ecs (>=1.26.0,<1.27.0)"] -efs = ["mypy-boto3-efs (>=1.26.0,<1.27.0)"] -eks = ["mypy-boto3-eks (>=1.26.0,<1.27.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.26.0,<1.27.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.26.0,<1.27.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.26.0,<1.27.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.26.0,<1.27.0)"] -elb = ["mypy-boto3-elb (>=1.26.0,<1.27.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.26.0,<1.27.0)"] -emr = ["mypy-boto3-emr (>=1.26.0,<1.27.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.26.0,<1.27.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.26.0,<1.27.0)"] 
-es = ["mypy-boto3-es (>=1.26.0,<1.27.0)"] -essential = ["mypy-boto3-cloudformation (>=1.26.0,<1.27.0)", "mypy-boto3-dynamodb (>=1.26.0,<1.27.0)", "mypy-boto3-ec2 (>=1.26.0,<1.27.0)", "mypy-boto3-lambda (>=1.26.0,<1.27.0)", "mypy-boto3-rds (>=1.26.0,<1.27.0)", "mypy-boto3-s3 (>=1.26.0,<1.27.0)", "mypy-boto3-sqs (>=1.26.0,<1.27.0)"] -events = ["mypy-boto3-events (>=1.26.0,<1.27.0)"] -evidently = ["mypy-boto3-evidently (>=1.26.0,<1.27.0)"] -finspace = ["mypy-boto3-finspace (>=1.26.0,<1.27.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.26.0,<1.27.0)"] -firehose = ["mypy-boto3-firehose (>=1.26.0,<1.27.0)"] -fis = ["mypy-boto3-fis (>=1.26.0,<1.27.0)"] -fms = ["mypy-boto3-fms (>=1.26.0,<1.27.0)"] -forecast = ["mypy-boto3-forecast (>=1.26.0,<1.27.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.26.0,<1.27.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.26.0,<1.27.0)"] -fsx = ["mypy-boto3-fsx (>=1.26.0,<1.27.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.26.0,<1.27.0)"] -gamesparks = ["mypy-boto3-gamesparks (>=1.26.0,<1.27.0)"] -glacier = ["mypy-boto3-glacier (>=1.26.0,<1.27.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.26.0,<1.27.0)"] -glue = ["mypy-boto3-glue (>=1.26.0,<1.27.0)"] -grafana = ["mypy-boto3-grafana (>=1.26.0,<1.27.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.26.0,<1.27.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.26.0,<1.27.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.26.0,<1.27.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.26.0,<1.27.0)"] -health = ["mypy-boto3-health (>=1.26.0,<1.27.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.26.0,<1.27.0)"] -honeycode = ["mypy-boto3-honeycode (>=1.26.0,<1.27.0)"] -iam = ["mypy-boto3-iam (>=1.26.0,<1.27.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.26.0,<1.27.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.26.0,<1.27.0)"] -importexport = ["mypy-boto3-importexport (>=1.26.0,<1.27.0)"] -inspector = ["mypy-boto3-inspector (>=1.26.0,<1.27.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.26.0,<1.27.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.26.0,<1.27.0)"] -iot = ["mypy-boto3-iot (>=1.26.0,<1.27.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.26.0,<1.27.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.26.0,<1.27.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.26.0,<1.27.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.26.0,<1.27.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.26.0,<1.27.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.26.0,<1.27.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.26.0,<1.27.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.26.0,<1.27.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.26.0,<1.27.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.26.0,<1.27.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.26.0,<1.27.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.26.0,<1.27.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.26.0,<1.27.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.26.0,<1.27.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.26.0,<1.27.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.26.0,<1.27.0)"] -ivs = ["mypy-boto3-ivs (>=1.26.0,<1.27.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.26.0,<1.27.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.26.0,<1.27.0)"] -kafka = ["mypy-boto3-kafka (>=1.26.0,<1.27.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.26.0,<1.27.0)"] -kendra = ["mypy-boto3-kendra (>=1.26.0,<1.27.0)"] -kendra-ranking = 
["mypy-boto3-kendra-ranking (>=1.26.0,<1.27.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.26.0,<1.27.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.26.0,<1.27.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.26.0,<1.27.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.26.0,<1.27.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.26.0,<1.27.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.26.0,<1.27.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.26.0,<1.27.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.26.0,<1.27.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.26.0,<1.27.0)"] -kms = ["mypy-boto3-kms (>=1.26.0,<1.27.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.26.0,<1.27.0)"] -lambda = ["mypy-boto3-lambda (>=1.26.0,<1.27.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.26.0,<1.27.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.26.0,<1.27.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.26.0,<1.27.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.26.0,<1.27.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.26.0,<1.27.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.26.0,<1.27.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.26.0,<1.27.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.26.0,<1.27.0)"] -location = ["mypy-boto3-location (>=1.26.0,<1.27.0)"] -logs = ["mypy-boto3-logs (>=1.26.0,<1.27.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.26.0,<1.27.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.26.0,<1.27.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.26.0,<1.27.0)"] -m2 = ["mypy-boto3-m2 (>=1.26.0,<1.27.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.26.0,<1.27.0)"] -macie = ["mypy-boto3-macie (>=1.26.0,<1.27.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.26.0,<1.27.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.26.0,<1.27.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.26.0,<1.27.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.26.0,<1.27.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.26.0,<1.27.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.26.0,<1.27.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.26.0,<1.27.0)"] -medialive = ["mypy-boto3-medialive (>=1.26.0,<1.27.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.26.0,<1.27.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.26.0,<1.27.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.26.0,<1.27.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.26.0,<1.27.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.26.0,<1.27.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.26.0,<1.27.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.26.0,<1.27.0)"] -mgh = ["mypy-boto3-mgh (>=1.26.0,<1.27.0)"] -mgn = ["mypy-boto3-mgn (>=1.26.0,<1.27.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.26.0,<1.27.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.26.0,<1.27.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.26.0,<1.27.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.26.0,<1.27.0)"] -mobile = ["mypy-boto3-mobile (>=1.26.0,<1.27.0)"] -mq = ["mypy-boto3-mq (>=1.26.0,<1.27.0)"] -mturk = ["mypy-boto3-mturk (>=1.26.0,<1.27.0)"] -mwaa = ["mypy-boto3-mwaa 
(>=1.26.0,<1.27.0)"] -neptune = ["mypy-boto3-neptune (>=1.26.0,<1.27.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.26.0,<1.27.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.26.0,<1.27.0)"] -nimble = ["mypy-boto3-nimble (>=1.26.0,<1.27.0)"] -oam = ["mypy-boto3-oam (>=1.26.0,<1.27.0)"] -omics = ["mypy-boto3-omics (>=1.26.0,<1.27.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.26.0,<1.27.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.26.0,<1.27.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.26.0,<1.27.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.26.0,<1.27.0)"] -organizations = ["mypy-boto3-organizations (>=1.26.0,<1.27.0)"] -osis = ["mypy-boto3-osis (>=1.26.0,<1.27.0)"] -outposts = ["mypy-boto3-outposts (>=1.26.0,<1.27.0)"] -panorama = ["mypy-boto3-panorama (>=1.26.0,<1.27.0)"] -personalize = ["mypy-boto3-personalize (>=1.26.0,<1.27.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.26.0,<1.27.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.26.0,<1.27.0)"] -pi = ["mypy-boto3-pi (>=1.26.0,<1.27.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.26.0,<1.27.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.26.0,<1.27.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.26.0,<1.27.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.26.0,<1.27.0)"] -pipes = ["mypy-boto3-pipes (>=1.26.0,<1.27.0)"] -polly = ["mypy-boto3-polly (>=1.26.0,<1.27.0)"] -pricing = ["mypy-boto3-pricing (>=1.26.0,<1.27.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.26.0,<1.27.0)"] -proton = ["mypy-boto3-proton (>=1.26.0,<1.27.0)"] -qldb = ["mypy-boto3-qldb (>=1.26.0,<1.27.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.26.0,<1.27.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.26.0,<1.27.0)"] -ram = ["mypy-boto3-ram (>=1.26.0,<1.27.0)"] -rbin = ["mypy-boto3-rbin (>=1.26.0,<1.27.0)"] -rds = ["mypy-boto3-rds (>=1.26.0,<1.27.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.26.0,<1.27.0)"] -redshift = ["mypy-boto3-redshift (>=1.26.0,<1.27.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.26.0,<1.27.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.26.0,<1.27.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.26.0,<1.27.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.26.0,<1.27.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.26.0,<1.27.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.26.0,<1.27.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.26.0,<1.27.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.26.0,<1.27.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.26.0,<1.27.0)"] -route53 = ["mypy-boto3-route53 (>=1.26.0,<1.27.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.26.0,<1.27.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.26.0,<1.27.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.26.0,<1.27.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.26.0,<1.27.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.26.0,<1.27.0)"] -rum = ["mypy-boto3-rum (>=1.26.0,<1.27.0)"] -s3 = ["mypy-boto3-s3 (>=1.26.0,<1.27.0)"] -s3control = ["mypy-boto3-s3control (>=1.26.0,<1.27.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.26.0,<1.27.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.26.0,<1.27.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.26.0,<1.27.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.26.0,<1.27.0)"] 
-sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.26.0,<1.27.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.26.0,<1.27.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.26.0,<1.27.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.26.0,<1.27.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.26.0,<1.27.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.26.0,<1.27.0)"] -schemas = ["mypy-boto3-schemas (>=1.26.0,<1.27.0)"] -sdb = ["mypy-boto3-sdb (>=1.26.0,<1.27.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.26.0,<1.27.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.26.0,<1.27.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.26.0,<1.27.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.26.0,<1.27.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.26.0,<1.27.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.26.0,<1.27.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.26.0,<1.27.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.26.0,<1.27.0)"] -ses = ["mypy-boto3-ses (>=1.26.0,<1.27.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.26.0,<1.27.0)"] -shield = ["mypy-boto3-shield (>=1.26.0,<1.27.0)"] -signer = ["mypy-boto3-signer (>=1.26.0,<1.27.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.26.0,<1.27.0)"] -sms = ["mypy-boto3-sms (>=1.26.0,<1.27.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.26.0,<1.27.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.26.0,<1.27.0)"] -snowball = ["mypy-boto3-snowball (>=1.26.0,<1.27.0)"] -sns = ["mypy-boto3-sns (>=1.26.0,<1.27.0)"] -sqs = ["mypy-boto3-sqs (>=1.26.0,<1.27.0)"] -ssm = ["mypy-boto3-ssm (>=1.26.0,<1.27.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.26.0,<1.27.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.26.0,<1.27.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.26.0,<1.27.0)"] -sso = ["mypy-boto3-sso (>=1.26.0,<1.27.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.26.0,<1.27.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.26.0,<1.27.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.26.0,<1.27.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.26.0,<1.27.0)"] -sts = ["mypy-boto3-sts (>=1.26.0,<1.27.0)"] -support = ["mypy-boto3-support (>=1.26.0,<1.27.0)"] -support-app = ["mypy-boto3-support-app (>=1.26.0,<1.27.0)"] -swf = ["mypy-boto3-swf (>=1.26.0,<1.27.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.26.0,<1.27.0)"] -textract = ["mypy-boto3-textract (>=1.26.0,<1.27.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.26.0,<1.27.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.26.0,<1.27.0)"] -tnb = ["mypy-boto3-tnb (>=1.26.0,<1.27.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.26.0,<1.27.0)"] -transfer = ["mypy-boto3-transfer (>=1.26.0,<1.27.0)"] -translate = ["mypy-boto3-translate (>=1.26.0,<1.27.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.26.0,<1.27.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.26.0,<1.27.0)"] -waf = ["mypy-boto3-waf (>=1.26.0,<1.27.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.26.0,<1.27.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.26.0,<1.27.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.26.0,<1.27.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.26.0,<1.27.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.26.0,<1.27.0)"] -worklink = ["mypy-boto3-worklink (>=1.26.0,<1.27.0)"] -workmail = ["mypy-boto3-workmail (>=1.26.0,<1.27.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.26.0,<1.27.0)"] -workspaces = ["mypy-boto3-workspaces 
(>=1.26.0,<1.27.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.26.0,<1.27.0)"] -xray = ["mypy-boto3-xray (>=1.26.0,<1.27.0)"] - [[package]] name = "botocore" version = "1.29.76" @@ -4299,156 +3923,6 @@ dmypy = ["psutil (>=4.0)"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] -[[package]] -name = "mypy-boto3-apigateway" -version = "1.26.40" -description = "Type annotations for boto3.APIGateway 1.26.40 service generated with mypy-boto3-builder 7.12.2" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-apigateway-1.26.40.tar.gz", hash = "sha256:5c7bab7c1f737e439027e00df09bd8ad8d7d7f90e10d335df0864bb0047f17d2"}, - {file = "mypy_boto3_apigateway-1.26.40-py3-none-any.whl", hash = "sha256:43b3e95707b4a610fea9966a385c10e50db822898a3ed58f959a3e11a073dd60"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-apigatewayv2" -version = "1.26.0.post1" -description = "Type annotations for boto3.ApiGatewayV2 1.26.0 service generated with mypy-boto3-builder 7.11.10" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-apigatewayv2-1.26.0.post1.tar.gz", hash = "sha256:ab929a60f4484af48adc692315c5e1889fd6a29f2d231fb213be404e03cfd211"}, - {file = "mypy_boto3_apigatewayv2-1.26.0.post1-py3-none-any.whl", hash = "sha256:3ea78eff466f6b52d8725a060d43ef614b7f6f46130573bd89df665b49b84a83"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-ec2" -version = "1.26.129" -description = "Type annotations for boto3.EC2 1.26.129 service generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-ec2-1.26.129.tar.gz", hash = "sha256:b0ea692c77d9bf5a8a3a127d06e5fc0278e1adaad0a1bc338e9ff0a80547637c"}, - {file = "mypy_boto3_ec2-1.26.129-py3-none-any.whl", hash = "sha256:cb30dbe6bd4fed7060a03c925bac6c46c5d5880c2928fadffe3f3cb7881bcbd2"}, -] - -[[package]] -name = "mypy-boto3-ecs" -version = "1.26.127" -description = "Type annotations for boto3.ECS 1.26.127 service generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-ecs-1.26.127.tar.gz", hash = "sha256:b70523af96267e9ce7e71eb20c4b0c6ed8057a172085c7dc5f5f4684301bae2d"}, - {file = "mypy_boto3_ecs-1.26.127-py3-none-any.whl", hash = "sha256:807e1c6d81dc5b3b64222f634511c682dfaead03e00f068eea88449326e3f9b1"}, -] - -[[package]] -name = "mypy-boto3-elb" -version = "1.26.0.post1" -description = "Type annotations for boto3.ElasticLoadBalancing 1.26.0 service generated with mypy-boto3-builder 7.11.10" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-elb-1.26.0.post1.tar.gz", hash = "sha256:ec2f51a9d08ad08789b49dd8b34d557d5d34389e763f620b4f508b6806de9a08"}, - {file = "mypy_boto3_elb-1.26.0.post1-py3-none-any.whl", hash = "sha256:cbe4576f970dc02e784f19e13f82613cc14849fca95db6c4de0d679cb4861c74"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-elbv2" -version = "1.26.63" -description = "Type annotations for boto3.ElasticLoadBalancingv2 1.26.63 service generated with mypy-boto3-builder 7.12.3" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-elbv2-1.26.63.tar.gz", hash = "sha256:27890289bf7ed2d410c6daa742cfc7ed30683796798ff588f943fda4904bdd48"}, - {file = "mypy_boto3_elbv2-1.26.63-py3-none-any.whl", 
hash = "sha256:a968c2f6bff48185a430f9b49d473c69ac49c68d17d6490f3201c39c390851fb"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-rds" -version = "1.26.116" -description = "Type annotations for boto3.RDS 1.26.116 service generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-rds-1.26.116.tar.gz", hash = "sha256:00c7e91ecc3978a8dccfcbb4544cc6830cda4c2b16bf1928b33e0aa4850ae837"}, - {file = "mypy_boto3_rds-1.26.116-py3-none-any.whl", hash = "sha256:576ea6f37d8440a15d5c2fc55b3b7020ac8e087e69be0b98cc37258d80ce5d04"}, -] - -[[package]] -name = "mypy-boto3-route53" -version = "1.26.56" -description = "Type annotations for boto3.Route53 1.26.56 service generated with mypy-boto3-builder 7.12.3" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-route53-1.26.56.tar.gz", hash = "sha256:f5cc97e8bcdc75bb2270196ec2b9aa517100dd783ca8c90dbcd6afae726f3d7f"}, - {file = "mypy_boto3_route53-1.26.56-py3-none-any.whl", hash = "sha256:c2e5f10b3bfabf24f765eb6895d33c9b53924353cc65fa19ab140341c34ce48a"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-route53domains" -version = "1.26.34" -description = "Type annotations for boto3.Route53Domains 1.26.34 service generated with mypy-boto3-builder 7.12.0" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-route53domains-1.26.34.tar.gz", hash = "sha256:a106b928888eaa95d504714b1892817e70486f7a51ba2ef88a2335dbee379819"}, - {file = "mypy_boto3_route53domains-1.26.34-py3-none-any.whl", hash = "sha256:92bb4c5f5246b49cb9356f0f91843691c2c1591fe3243c2c4c96e97bc9501aff"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[[package]] -name = "mypy-boto3-s3" -version = "1.26.127" -description = "Type annotations for boto3.S3 1.26.127 service generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-s3-1.26.127.tar.gz", hash = "sha256:0e548b97c6a2589f7bff5d26a1ca101622749771379226e3ad0822629d0613c5"}, - {file = "mypy_boto3_s3-1.26.127-py3-none-any.whl", hash = "sha256:21e647caa18d98dbbc706597c9b27d41674f18850f42b2cfdb9a39b39820e470"}, -] - -[[package]] -name = "mypy-boto3-sts" -version = "1.26.130" -description = "Type annotations for boto3.STS 1.26.130 service generated with mypy-boto3-builder 7.14.5" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-boto3-sts-1.26.130.tar.gz", hash = "sha256:9329053dc1aa015eb3083d5e086a1f652bf090ec108f0cfa20dd53f58ef2936e"}, - {file = "mypy_boto3_sts-1.26.130-py3-none-any.whl", hash = "sha256:55e672f30c0a5b4d6e3342673c4744a8f2b2c969d420695938a5f374a85d3911"}, -] - [[package]] name = "mypy-extensions" version = "1.0.0" @@ -5920,6 +5394,526 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "types-aiobotocore" +version = "2.5.0.post2" +description = "Type annotations for aiobotocore 2.5.0 generated with mypy-boto3-builder 7.14.5" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-2.5.0.post2.tar.gz", hash = "sha256:e0e46a6834841db86083ce0dad3d5efbaebe0a43c18f783b0e8c95ea1f7e80c5"}, + {file = "types_aiobotocore-2.5.0.post2-py3-none-any.whl", hash = 
"sha256:75ddccdaf5cdf080a1a62cb74346f2704187a53b9ff1a8d63428c2e1de4e05f7"}, +] + +[package.dependencies] +botocore-stubs = "*" +types-aiobotocore-apigateway = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"apigateway\""} +types-aiobotocore-apigatewayv2 = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"apigatewayv2\""} +types-aiobotocore-ec2 = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"ec2\""} +types-aiobotocore-ecs = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"ecs\""} +types-aiobotocore-elb = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"elb\""} +types-aiobotocore-elbv2 = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"elbv2\""} +types-aiobotocore-rds = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"rds\""} +types-aiobotocore-route53 = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"route53\""} +types-aiobotocore-s3 = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"s3\""} +types-aiobotocore-sts = {version = ">=2.5.0,<2.6.0", optional = true, markers = "extra == \"sts\""} + +[package.extras] +accessanalyzer = ["types-aiobotocore-accessanalyzer (>=2.5.0,<2.6.0)"] +account = ["types-aiobotocore-account (>=2.5.0,<2.6.0)"] +acm = ["types-aiobotocore-acm (>=2.5.0,<2.6.0)"] +acm-pca = ["types-aiobotocore-acm-pca (>=2.5.0,<2.6.0)"] +aiobotocore = ["aiobotocore (==2.5.0)", "botocore (==1.29.76)"] +alexaforbusiness = ["types-aiobotocore-alexaforbusiness (>=2.5.0,<2.6.0)"] +all = ["types-aiobotocore-accessanalyzer (>=2.5.0,<2.6.0)", "types-aiobotocore-account (>=2.5.0,<2.6.0)", "types-aiobotocore-acm (>=2.5.0,<2.6.0)", "types-aiobotocore-acm-pca (>=2.5.0,<2.6.0)", "types-aiobotocore-alexaforbusiness (>=2.5.0,<2.6.0)", "types-aiobotocore-amp (>=2.5.0,<2.6.0)", "types-aiobotocore-amplify (>=2.5.0,<2.6.0)", "types-aiobotocore-amplifybackend (>=2.5.0,<2.6.0)", "types-aiobotocore-amplifyuibuilder (>=2.5.0,<2.6.0)", "types-aiobotocore-apigateway (>=2.5.0,<2.6.0)", "types-aiobotocore-apigatewaymanagementapi (>=2.5.0,<2.6.0)", "types-aiobotocore-apigatewayv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-appconfig (>=2.5.0,<2.6.0)", "types-aiobotocore-appconfigdata (>=2.5.0,<2.6.0)", "types-aiobotocore-appflow (>=2.5.0,<2.6.0)", "types-aiobotocore-appintegrations (>=2.5.0,<2.6.0)", "types-aiobotocore-application-autoscaling (>=2.5.0,<2.6.0)", "types-aiobotocore-application-insights (>=2.5.0,<2.6.0)", "types-aiobotocore-applicationcostprofiler (>=2.5.0,<2.6.0)", "types-aiobotocore-appmesh (>=2.5.0,<2.6.0)", "types-aiobotocore-apprunner (>=2.5.0,<2.6.0)", "types-aiobotocore-appstream (>=2.5.0,<2.6.0)", "types-aiobotocore-appsync (>=2.5.0,<2.6.0)", "types-aiobotocore-arc-zonal-shift (>=2.5.0,<2.6.0)", "types-aiobotocore-athena (>=2.5.0,<2.6.0)", "types-aiobotocore-auditmanager (>=2.5.0,<2.6.0)", "types-aiobotocore-autoscaling (>=2.5.0,<2.6.0)", "types-aiobotocore-autoscaling-plans (>=2.5.0,<2.6.0)", "types-aiobotocore-backup (>=2.5.0,<2.6.0)", "types-aiobotocore-backup-gateway (>=2.5.0,<2.6.0)", "types-aiobotocore-backupstorage (>=2.5.0,<2.6.0)", "types-aiobotocore-batch (>=2.5.0,<2.6.0)", "types-aiobotocore-billingconductor (>=2.5.0,<2.6.0)", "types-aiobotocore-braket (>=2.5.0,<2.6.0)", "types-aiobotocore-budgets (>=2.5.0,<2.6.0)", "types-aiobotocore-ce (>=2.5.0,<2.6.0)", "types-aiobotocore-chime (>=2.5.0,<2.6.0)", "types-aiobotocore-chime-sdk-identity (>=2.5.0,<2.6.0)", "types-aiobotocore-chime-sdk-media-pipelines 
(>=2.5.0,<2.6.0)", "types-aiobotocore-chime-sdk-meetings (>=2.5.0,<2.6.0)", "types-aiobotocore-chime-sdk-messaging (>=2.5.0,<2.6.0)", "types-aiobotocore-chime-sdk-voice (>=2.5.0,<2.6.0)", "types-aiobotocore-cleanrooms (>=2.5.0,<2.6.0)", "types-aiobotocore-cloud9 (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudcontrol (>=2.5.0,<2.6.0)", "types-aiobotocore-clouddirectory (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudformation (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudfront (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudhsm (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudhsmv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudsearch (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudsearchdomain (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudtrail (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudtrail-data (>=2.5.0,<2.6.0)", "types-aiobotocore-cloudwatch (>=2.5.0,<2.6.0)", "types-aiobotocore-codeartifact (>=2.5.0,<2.6.0)", "types-aiobotocore-codebuild (>=2.5.0,<2.6.0)", "types-aiobotocore-codecatalyst (>=2.5.0,<2.6.0)", "types-aiobotocore-codecommit (>=2.5.0,<2.6.0)", "types-aiobotocore-codedeploy (>=2.5.0,<2.6.0)", "types-aiobotocore-codeguru-reviewer (>=2.5.0,<2.6.0)", "types-aiobotocore-codeguruprofiler (>=2.5.0,<2.6.0)", "types-aiobotocore-codepipeline (>=2.5.0,<2.6.0)", "types-aiobotocore-codestar (>=2.5.0,<2.6.0)", "types-aiobotocore-codestar-connections (>=2.5.0,<2.6.0)", "types-aiobotocore-codestar-notifications (>=2.5.0,<2.6.0)", "types-aiobotocore-cognito-identity (>=2.5.0,<2.6.0)", "types-aiobotocore-cognito-idp (>=2.5.0,<2.6.0)", "types-aiobotocore-cognito-sync (>=2.5.0,<2.6.0)", "types-aiobotocore-comprehend (>=2.5.0,<2.6.0)", "types-aiobotocore-comprehendmedical (>=2.5.0,<2.6.0)", "types-aiobotocore-compute-optimizer (>=2.5.0,<2.6.0)", "types-aiobotocore-config (>=2.5.0,<2.6.0)", "types-aiobotocore-connect (>=2.5.0,<2.6.0)", "types-aiobotocore-connect-contact-lens (>=2.5.0,<2.6.0)", "types-aiobotocore-connectcampaigns (>=2.5.0,<2.6.0)", "types-aiobotocore-connectcases (>=2.5.0,<2.6.0)", "types-aiobotocore-connectparticipant (>=2.5.0,<2.6.0)", "types-aiobotocore-controltower (>=2.5.0,<2.6.0)", "types-aiobotocore-cur (>=2.5.0,<2.6.0)", "types-aiobotocore-customer-profiles (>=2.5.0,<2.6.0)", "types-aiobotocore-databrew (>=2.5.0,<2.6.0)", "types-aiobotocore-dataexchange (>=2.5.0,<2.6.0)", "types-aiobotocore-datapipeline (>=2.5.0,<2.6.0)", "types-aiobotocore-datasync (>=2.5.0,<2.6.0)", "types-aiobotocore-dax (>=2.5.0,<2.6.0)", "types-aiobotocore-detective (>=2.5.0,<2.6.0)", "types-aiobotocore-devicefarm (>=2.5.0,<2.6.0)", "types-aiobotocore-devops-guru (>=2.5.0,<2.6.0)", "types-aiobotocore-directconnect (>=2.5.0,<2.6.0)", "types-aiobotocore-discovery (>=2.5.0,<2.6.0)", "types-aiobotocore-dlm (>=2.5.0,<2.6.0)", "types-aiobotocore-dms (>=2.5.0,<2.6.0)", "types-aiobotocore-docdb (>=2.5.0,<2.6.0)", "types-aiobotocore-docdb-elastic (>=2.5.0,<2.6.0)", "types-aiobotocore-drs (>=2.5.0,<2.6.0)", "types-aiobotocore-ds (>=2.5.0,<2.6.0)", "types-aiobotocore-dynamodb (>=2.5.0,<2.6.0)", "types-aiobotocore-dynamodbstreams (>=2.5.0,<2.6.0)", "types-aiobotocore-ebs (>=2.5.0,<2.6.0)", "types-aiobotocore-ec2 (>=2.5.0,<2.6.0)", "types-aiobotocore-ec2-instance-connect (>=2.5.0,<2.6.0)", "types-aiobotocore-ecr (>=2.5.0,<2.6.0)", "types-aiobotocore-ecr-public (>=2.5.0,<2.6.0)", "types-aiobotocore-ecs (>=2.5.0,<2.6.0)", "types-aiobotocore-efs (>=2.5.0,<2.6.0)", "types-aiobotocore-eks (>=2.5.0,<2.6.0)", "types-aiobotocore-elastic-inference (>=2.5.0,<2.6.0)", "types-aiobotocore-elasticache (>=2.5.0,<2.6.0)", "types-aiobotocore-elasticbeanstalk 
(>=2.5.0,<2.6.0)", "types-aiobotocore-elastictranscoder (>=2.5.0,<2.6.0)", "types-aiobotocore-elb (>=2.5.0,<2.6.0)", "types-aiobotocore-elbv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-emr (>=2.5.0,<2.6.0)", "types-aiobotocore-emr-containers (>=2.5.0,<2.6.0)", "types-aiobotocore-emr-serverless (>=2.5.0,<2.6.0)", "types-aiobotocore-es (>=2.5.0,<2.6.0)", "types-aiobotocore-events (>=2.5.0,<2.6.0)", "types-aiobotocore-evidently (>=2.5.0,<2.6.0)", "types-aiobotocore-finspace (>=2.5.0,<2.6.0)", "types-aiobotocore-finspace-data (>=2.5.0,<2.6.0)", "types-aiobotocore-firehose (>=2.5.0,<2.6.0)", "types-aiobotocore-fis (>=2.5.0,<2.6.0)", "types-aiobotocore-fms (>=2.5.0,<2.6.0)", "types-aiobotocore-forecast (>=2.5.0,<2.6.0)", "types-aiobotocore-forecastquery (>=2.5.0,<2.6.0)", "types-aiobotocore-frauddetector (>=2.5.0,<2.6.0)", "types-aiobotocore-fsx (>=2.5.0,<2.6.0)", "types-aiobotocore-gamelift (>=2.5.0,<2.6.0)", "types-aiobotocore-gamesparks (>=2.5.0,<2.6.0)", "types-aiobotocore-glacier (>=2.5.0,<2.6.0)", "types-aiobotocore-globalaccelerator (>=2.5.0,<2.6.0)", "types-aiobotocore-glue (>=2.5.0,<2.6.0)", "types-aiobotocore-grafana (>=2.5.0,<2.6.0)", "types-aiobotocore-greengrass (>=2.5.0,<2.6.0)", "types-aiobotocore-greengrassv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-groundstation (>=2.5.0,<2.6.0)", "types-aiobotocore-guardduty (>=2.5.0,<2.6.0)", "types-aiobotocore-health (>=2.5.0,<2.6.0)", "types-aiobotocore-healthlake (>=2.5.0,<2.6.0)", "types-aiobotocore-honeycode (>=2.5.0,<2.6.0)", "types-aiobotocore-iam (>=2.5.0,<2.6.0)", "types-aiobotocore-identitystore (>=2.5.0,<2.6.0)", "types-aiobotocore-imagebuilder (>=2.5.0,<2.6.0)", "types-aiobotocore-importexport (>=2.5.0,<2.6.0)", "types-aiobotocore-inspector (>=2.5.0,<2.6.0)", "types-aiobotocore-inspector2 (>=2.5.0,<2.6.0)", "types-aiobotocore-iot (>=2.5.0,<2.6.0)", "types-aiobotocore-iot-data (>=2.5.0,<2.6.0)", "types-aiobotocore-iot-jobs-data (>=2.5.0,<2.6.0)", "types-aiobotocore-iot-roborunner (>=2.5.0,<2.6.0)", "types-aiobotocore-iot1click-devices (>=2.5.0,<2.6.0)", "types-aiobotocore-iot1click-projects (>=2.5.0,<2.6.0)", "types-aiobotocore-iotanalytics (>=2.5.0,<2.6.0)", "types-aiobotocore-iotdeviceadvisor (>=2.5.0,<2.6.0)", "types-aiobotocore-iotevents (>=2.5.0,<2.6.0)", "types-aiobotocore-iotevents-data (>=2.5.0,<2.6.0)", "types-aiobotocore-iotfleethub (>=2.5.0,<2.6.0)", "types-aiobotocore-iotfleetwise (>=2.5.0,<2.6.0)", "types-aiobotocore-iotsecuretunneling (>=2.5.0,<2.6.0)", "types-aiobotocore-iotsitewise (>=2.5.0,<2.6.0)", "types-aiobotocore-iotthingsgraph (>=2.5.0,<2.6.0)", "types-aiobotocore-iottwinmaker (>=2.5.0,<2.6.0)", "types-aiobotocore-iotwireless (>=2.5.0,<2.6.0)", "types-aiobotocore-ivs (>=2.5.0,<2.6.0)", "types-aiobotocore-ivschat (>=2.5.0,<2.6.0)", "types-aiobotocore-kafka (>=2.5.0,<2.6.0)", "types-aiobotocore-kafkaconnect (>=2.5.0,<2.6.0)", "types-aiobotocore-kendra (>=2.5.0,<2.6.0)", "types-aiobotocore-kendra-ranking (>=2.5.0,<2.6.0)", "types-aiobotocore-keyspaces (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesis (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesis-video-archived-media (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesis-video-media (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesis-video-signaling (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesis-video-webrtc-storage (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesisanalytics (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesisanalyticsv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-kinesisvideo (>=2.5.0,<2.6.0)", "types-aiobotocore-kms (>=2.5.0,<2.6.0)", "types-aiobotocore-lakeformation (>=2.5.0,<2.6.0)", 
"types-aiobotocore-lambda (>=2.5.0,<2.6.0)", "types-aiobotocore-lex-models (>=2.5.0,<2.6.0)", "types-aiobotocore-lex-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-lexv2-models (>=2.5.0,<2.6.0)", "types-aiobotocore-lexv2-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-license-manager (>=2.5.0,<2.6.0)", "types-aiobotocore-license-manager-linux-subscriptions (>=2.5.0,<2.6.0)", "types-aiobotocore-license-manager-user-subscriptions (>=2.5.0,<2.6.0)", "types-aiobotocore-lightsail (>=2.5.0,<2.6.0)", "types-aiobotocore-location (>=2.5.0,<2.6.0)", "types-aiobotocore-logs (>=2.5.0,<2.6.0)", "types-aiobotocore-lookoutequipment (>=2.5.0,<2.6.0)", "types-aiobotocore-lookoutmetrics (>=2.5.0,<2.6.0)", "types-aiobotocore-lookoutvision (>=2.5.0,<2.6.0)", "types-aiobotocore-m2 (>=2.5.0,<2.6.0)", "types-aiobotocore-machinelearning (>=2.5.0,<2.6.0)", "types-aiobotocore-macie (>=2.5.0,<2.6.0)", "types-aiobotocore-macie2 (>=2.5.0,<2.6.0)", "types-aiobotocore-managedblockchain (>=2.5.0,<2.6.0)", "types-aiobotocore-marketplace-catalog (>=2.5.0,<2.6.0)", "types-aiobotocore-marketplace-entitlement (>=2.5.0,<2.6.0)", "types-aiobotocore-marketplacecommerceanalytics (>=2.5.0,<2.6.0)", "types-aiobotocore-mediaconnect (>=2.5.0,<2.6.0)", "types-aiobotocore-mediaconvert (>=2.5.0,<2.6.0)", "types-aiobotocore-medialive (>=2.5.0,<2.6.0)", "types-aiobotocore-mediapackage (>=2.5.0,<2.6.0)", "types-aiobotocore-mediapackage-vod (>=2.5.0,<2.6.0)", "types-aiobotocore-mediastore (>=2.5.0,<2.6.0)", "types-aiobotocore-mediastore-data (>=2.5.0,<2.6.0)", "types-aiobotocore-mediatailor (>=2.5.0,<2.6.0)", "types-aiobotocore-memorydb (>=2.5.0,<2.6.0)", "types-aiobotocore-meteringmarketplace (>=2.5.0,<2.6.0)", "types-aiobotocore-mgh (>=2.5.0,<2.6.0)", "types-aiobotocore-mgn (>=2.5.0,<2.6.0)", "types-aiobotocore-migration-hub-refactor-spaces (>=2.5.0,<2.6.0)", "types-aiobotocore-migrationhub-config (>=2.5.0,<2.6.0)", "types-aiobotocore-migrationhuborchestrator (>=2.5.0,<2.6.0)", "types-aiobotocore-migrationhubstrategy (>=2.5.0,<2.6.0)", "types-aiobotocore-mobile (>=2.5.0,<2.6.0)", "types-aiobotocore-mq (>=2.5.0,<2.6.0)", "types-aiobotocore-mturk (>=2.5.0,<2.6.0)", "types-aiobotocore-mwaa (>=2.5.0,<2.6.0)", "types-aiobotocore-neptune (>=2.5.0,<2.6.0)", "types-aiobotocore-network-firewall (>=2.5.0,<2.6.0)", "types-aiobotocore-networkmanager (>=2.5.0,<2.6.0)", "types-aiobotocore-nimble (>=2.5.0,<2.6.0)", "types-aiobotocore-oam (>=2.5.0,<2.6.0)", "types-aiobotocore-omics (>=2.5.0,<2.6.0)", "types-aiobotocore-opensearch (>=2.5.0,<2.6.0)", "types-aiobotocore-opensearchserverless (>=2.5.0,<2.6.0)", "types-aiobotocore-opsworks (>=2.5.0,<2.6.0)", "types-aiobotocore-opsworkscm (>=2.5.0,<2.6.0)", "types-aiobotocore-organizations (>=2.5.0,<2.6.0)", "types-aiobotocore-outposts (>=2.5.0,<2.6.0)", "types-aiobotocore-panorama (>=2.5.0,<2.6.0)", "types-aiobotocore-personalize (>=2.5.0,<2.6.0)", "types-aiobotocore-personalize-events (>=2.5.0,<2.6.0)", "types-aiobotocore-personalize-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-pi (>=2.5.0,<2.6.0)", "types-aiobotocore-pinpoint (>=2.5.0,<2.6.0)", "types-aiobotocore-pinpoint-email (>=2.5.0,<2.6.0)", "types-aiobotocore-pinpoint-sms-voice (>=2.5.0,<2.6.0)", "types-aiobotocore-pinpoint-sms-voice-v2 (>=2.5.0,<2.6.0)", "types-aiobotocore-pipes (>=2.5.0,<2.6.0)", "types-aiobotocore-polly (>=2.5.0,<2.6.0)", "types-aiobotocore-pricing (>=2.5.0,<2.6.0)", "types-aiobotocore-privatenetworks (>=2.5.0,<2.6.0)", "types-aiobotocore-proton (>=2.5.0,<2.6.0)", "types-aiobotocore-qldb (>=2.5.0,<2.6.0)", 
"types-aiobotocore-qldb-session (>=2.5.0,<2.6.0)", "types-aiobotocore-quicksight (>=2.5.0,<2.6.0)", "types-aiobotocore-ram (>=2.5.0,<2.6.0)", "types-aiobotocore-rbin (>=2.5.0,<2.6.0)", "types-aiobotocore-rds (>=2.5.0,<2.6.0)", "types-aiobotocore-rds-data (>=2.5.0,<2.6.0)", "types-aiobotocore-redshift (>=2.5.0,<2.6.0)", "types-aiobotocore-redshift-data (>=2.5.0,<2.6.0)", "types-aiobotocore-redshift-serverless (>=2.5.0,<2.6.0)", "types-aiobotocore-rekognition (>=2.5.0,<2.6.0)", "types-aiobotocore-resiliencehub (>=2.5.0,<2.6.0)", "types-aiobotocore-resource-explorer-2 (>=2.5.0,<2.6.0)", "types-aiobotocore-resource-groups (>=2.5.0,<2.6.0)", "types-aiobotocore-resourcegroupstaggingapi (>=2.5.0,<2.6.0)", "types-aiobotocore-robomaker (>=2.5.0,<2.6.0)", "types-aiobotocore-rolesanywhere (>=2.5.0,<2.6.0)", "types-aiobotocore-route53 (>=2.5.0,<2.6.0)", "types-aiobotocore-route53-recovery-cluster (>=2.5.0,<2.6.0)", "types-aiobotocore-route53-recovery-control-config (>=2.5.0,<2.6.0)", "types-aiobotocore-route53-recovery-readiness (>=2.5.0,<2.6.0)", "types-aiobotocore-route53domains (>=2.5.0,<2.6.0)", "types-aiobotocore-route53resolver (>=2.5.0,<2.6.0)", "types-aiobotocore-rum (>=2.5.0,<2.6.0)", "types-aiobotocore-s3 (>=2.5.0,<2.6.0)", "types-aiobotocore-s3control (>=2.5.0,<2.6.0)", "types-aiobotocore-s3outposts (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-a2i-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-edge (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-featurestore-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-geospatial (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-metrics (>=2.5.0,<2.6.0)", "types-aiobotocore-sagemaker-runtime (>=2.5.0,<2.6.0)", "types-aiobotocore-savingsplans (>=2.5.0,<2.6.0)", "types-aiobotocore-scheduler (>=2.5.0,<2.6.0)", "types-aiobotocore-schemas (>=2.5.0,<2.6.0)", "types-aiobotocore-sdb (>=2.5.0,<2.6.0)", "types-aiobotocore-secretsmanager (>=2.5.0,<2.6.0)", "types-aiobotocore-securityhub (>=2.5.0,<2.6.0)", "types-aiobotocore-securitylake (>=2.5.0,<2.6.0)", "types-aiobotocore-serverlessrepo (>=2.5.0,<2.6.0)", "types-aiobotocore-service-quotas (>=2.5.0,<2.6.0)", "types-aiobotocore-servicecatalog (>=2.5.0,<2.6.0)", "types-aiobotocore-servicecatalog-appregistry (>=2.5.0,<2.6.0)", "types-aiobotocore-servicediscovery (>=2.5.0,<2.6.0)", "types-aiobotocore-ses (>=2.5.0,<2.6.0)", "types-aiobotocore-sesv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-shield (>=2.5.0,<2.6.0)", "types-aiobotocore-signer (>=2.5.0,<2.6.0)", "types-aiobotocore-simspaceweaver (>=2.5.0,<2.6.0)", "types-aiobotocore-sms (>=2.5.0,<2.6.0)", "types-aiobotocore-sms-voice (>=2.5.0,<2.6.0)", "types-aiobotocore-snow-device-management (>=2.5.0,<2.6.0)", "types-aiobotocore-snowball (>=2.5.0,<2.6.0)", "types-aiobotocore-sns (>=2.5.0,<2.6.0)", "types-aiobotocore-sqs (>=2.5.0,<2.6.0)", "types-aiobotocore-ssm (>=2.5.0,<2.6.0)", "types-aiobotocore-ssm-contacts (>=2.5.0,<2.6.0)", "types-aiobotocore-ssm-incidents (>=2.5.0,<2.6.0)", "types-aiobotocore-ssm-sap (>=2.5.0,<2.6.0)", "types-aiobotocore-sso (>=2.5.0,<2.6.0)", "types-aiobotocore-sso-admin (>=2.5.0,<2.6.0)", "types-aiobotocore-sso-oidc (>=2.5.0,<2.6.0)", "types-aiobotocore-stepfunctions (>=2.5.0,<2.6.0)", "types-aiobotocore-storagegateway (>=2.5.0,<2.6.0)", "types-aiobotocore-sts (>=2.5.0,<2.6.0)", "types-aiobotocore-support (>=2.5.0,<2.6.0)", "types-aiobotocore-support-app (>=2.5.0,<2.6.0)", "types-aiobotocore-swf (>=2.5.0,<2.6.0)", "types-aiobotocore-synthetics (>=2.5.0,<2.6.0)", 
"types-aiobotocore-textract (>=2.5.0,<2.6.0)", "types-aiobotocore-timestream-query (>=2.5.0,<2.6.0)", "types-aiobotocore-timestream-write (>=2.5.0,<2.6.0)", "types-aiobotocore-tnb (>=2.5.0,<2.6.0)", "types-aiobotocore-transcribe (>=2.5.0,<2.6.0)", "types-aiobotocore-transfer (>=2.5.0,<2.6.0)", "types-aiobotocore-translate (>=2.5.0,<2.6.0)", "types-aiobotocore-voice-id (>=2.5.0,<2.6.0)", "types-aiobotocore-waf (>=2.5.0,<2.6.0)", "types-aiobotocore-waf-regional (>=2.5.0,<2.6.0)", "types-aiobotocore-wafv2 (>=2.5.0,<2.6.0)", "types-aiobotocore-wellarchitected (>=2.5.0,<2.6.0)", "types-aiobotocore-wisdom (>=2.5.0,<2.6.0)", "types-aiobotocore-workdocs (>=2.5.0,<2.6.0)", "types-aiobotocore-worklink (>=2.5.0,<2.6.0)", "types-aiobotocore-workmail (>=2.5.0,<2.6.0)", "types-aiobotocore-workmailmessageflow (>=2.5.0,<2.6.0)", "types-aiobotocore-workspaces (>=2.5.0,<2.6.0)", "types-aiobotocore-workspaces-web (>=2.5.0,<2.6.0)", "types-aiobotocore-xray (>=2.5.0,<2.6.0)"] +amp = ["types-aiobotocore-amp (>=2.5.0,<2.6.0)"] +amplify = ["types-aiobotocore-amplify (>=2.5.0,<2.6.0)"] +amplifybackend = ["types-aiobotocore-amplifybackend (>=2.5.0,<2.6.0)"] +amplifyuibuilder = ["types-aiobotocore-amplifyuibuilder (>=2.5.0,<2.6.0)"] +apigateway = ["types-aiobotocore-apigateway (>=2.5.0,<2.6.0)"] +apigatewaymanagementapi = ["types-aiobotocore-apigatewaymanagementapi (>=2.5.0,<2.6.0)"] +apigatewayv2 = ["types-aiobotocore-apigatewayv2 (>=2.5.0,<2.6.0)"] +appconfig = ["types-aiobotocore-appconfig (>=2.5.0,<2.6.0)"] +appconfigdata = ["types-aiobotocore-appconfigdata (>=2.5.0,<2.6.0)"] +appflow = ["types-aiobotocore-appflow (>=2.5.0,<2.6.0)"] +appintegrations = ["types-aiobotocore-appintegrations (>=2.5.0,<2.6.0)"] +application-autoscaling = ["types-aiobotocore-application-autoscaling (>=2.5.0,<2.6.0)"] +application-insights = ["types-aiobotocore-application-insights (>=2.5.0,<2.6.0)"] +applicationcostprofiler = ["types-aiobotocore-applicationcostprofiler (>=2.5.0,<2.6.0)"] +appmesh = ["types-aiobotocore-appmesh (>=2.5.0,<2.6.0)"] +apprunner = ["types-aiobotocore-apprunner (>=2.5.0,<2.6.0)"] +appstream = ["types-aiobotocore-appstream (>=2.5.0,<2.6.0)"] +appsync = ["types-aiobotocore-appsync (>=2.5.0,<2.6.0)"] +arc-zonal-shift = ["types-aiobotocore-arc-zonal-shift (>=2.5.0,<2.6.0)"] +athena = ["types-aiobotocore-athena (>=2.5.0,<2.6.0)"] +auditmanager = ["types-aiobotocore-auditmanager (>=2.5.0,<2.6.0)"] +autoscaling = ["types-aiobotocore-autoscaling (>=2.5.0,<2.6.0)"] +autoscaling-plans = ["types-aiobotocore-autoscaling-plans (>=2.5.0,<2.6.0)"] +backup = ["types-aiobotocore-backup (>=2.5.0,<2.6.0)"] +backup-gateway = ["types-aiobotocore-backup-gateway (>=2.5.0,<2.6.0)"] +backupstorage = ["types-aiobotocore-backupstorage (>=2.5.0,<2.6.0)"] +batch = ["types-aiobotocore-batch (>=2.5.0,<2.6.0)"] +billingconductor = ["types-aiobotocore-billingconductor (>=2.5.0,<2.6.0)"] +braket = ["types-aiobotocore-braket (>=2.5.0,<2.6.0)"] +budgets = ["types-aiobotocore-budgets (>=2.5.0,<2.6.0)"] +ce = ["types-aiobotocore-ce (>=2.5.0,<2.6.0)"] +chime = ["types-aiobotocore-chime (>=2.5.0,<2.6.0)"] +chime-sdk-identity = ["types-aiobotocore-chime-sdk-identity (>=2.5.0,<2.6.0)"] +chime-sdk-media-pipelines = ["types-aiobotocore-chime-sdk-media-pipelines (>=2.5.0,<2.6.0)"] +chime-sdk-meetings = ["types-aiobotocore-chime-sdk-meetings (>=2.5.0,<2.6.0)"] +chime-sdk-messaging = ["types-aiobotocore-chime-sdk-messaging (>=2.5.0,<2.6.0)"] +chime-sdk-voice = ["types-aiobotocore-chime-sdk-voice (>=2.5.0,<2.6.0)"] +cleanrooms = 
["types-aiobotocore-cleanrooms (>=2.5.0,<2.6.0)"] +cloud9 = ["types-aiobotocore-cloud9 (>=2.5.0,<2.6.0)"] +cloudcontrol = ["types-aiobotocore-cloudcontrol (>=2.5.0,<2.6.0)"] +clouddirectory = ["types-aiobotocore-clouddirectory (>=2.5.0,<2.6.0)"] +cloudformation = ["types-aiobotocore-cloudformation (>=2.5.0,<2.6.0)"] +cloudfront = ["types-aiobotocore-cloudfront (>=2.5.0,<2.6.0)"] +cloudhsm = ["types-aiobotocore-cloudhsm (>=2.5.0,<2.6.0)"] +cloudhsmv2 = ["types-aiobotocore-cloudhsmv2 (>=2.5.0,<2.6.0)"] +cloudsearch = ["types-aiobotocore-cloudsearch (>=2.5.0,<2.6.0)"] +cloudsearchdomain = ["types-aiobotocore-cloudsearchdomain (>=2.5.0,<2.6.0)"] +cloudtrail = ["types-aiobotocore-cloudtrail (>=2.5.0,<2.6.0)"] +cloudtrail-data = ["types-aiobotocore-cloudtrail-data (>=2.5.0,<2.6.0)"] +cloudwatch = ["types-aiobotocore-cloudwatch (>=2.5.0,<2.6.0)"] +codeartifact = ["types-aiobotocore-codeartifact (>=2.5.0,<2.6.0)"] +codebuild = ["types-aiobotocore-codebuild (>=2.5.0,<2.6.0)"] +codecatalyst = ["types-aiobotocore-codecatalyst (>=2.5.0,<2.6.0)"] +codecommit = ["types-aiobotocore-codecommit (>=2.5.0,<2.6.0)"] +codedeploy = ["types-aiobotocore-codedeploy (>=2.5.0,<2.6.0)"] +codeguru-reviewer = ["types-aiobotocore-codeguru-reviewer (>=2.5.0,<2.6.0)"] +codeguruprofiler = ["types-aiobotocore-codeguruprofiler (>=2.5.0,<2.6.0)"] +codepipeline = ["types-aiobotocore-codepipeline (>=2.5.0,<2.6.0)"] +codestar = ["types-aiobotocore-codestar (>=2.5.0,<2.6.0)"] +codestar-connections = ["types-aiobotocore-codestar-connections (>=2.5.0,<2.6.0)"] +codestar-notifications = ["types-aiobotocore-codestar-notifications (>=2.5.0,<2.6.0)"] +cognito-identity = ["types-aiobotocore-cognito-identity (>=2.5.0,<2.6.0)"] +cognito-idp = ["types-aiobotocore-cognito-idp (>=2.5.0,<2.6.0)"] +cognito-sync = ["types-aiobotocore-cognito-sync (>=2.5.0,<2.6.0)"] +comprehend = ["types-aiobotocore-comprehend (>=2.5.0,<2.6.0)"] +comprehendmedical = ["types-aiobotocore-comprehendmedical (>=2.5.0,<2.6.0)"] +compute-optimizer = ["types-aiobotocore-compute-optimizer (>=2.5.0,<2.6.0)"] +config = ["types-aiobotocore-config (>=2.5.0,<2.6.0)"] +connect = ["types-aiobotocore-connect (>=2.5.0,<2.6.0)"] +connect-contact-lens = ["types-aiobotocore-connect-contact-lens (>=2.5.0,<2.6.0)"] +connectcampaigns = ["types-aiobotocore-connectcampaigns (>=2.5.0,<2.6.0)"] +connectcases = ["types-aiobotocore-connectcases (>=2.5.0,<2.6.0)"] +connectparticipant = ["types-aiobotocore-connectparticipant (>=2.5.0,<2.6.0)"] +controltower = ["types-aiobotocore-controltower (>=2.5.0,<2.6.0)"] +cur = ["types-aiobotocore-cur (>=2.5.0,<2.6.0)"] +customer-profiles = ["types-aiobotocore-customer-profiles (>=2.5.0,<2.6.0)"] +databrew = ["types-aiobotocore-databrew (>=2.5.0,<2.6.0)"] +dataexchange = ["types-aiobotocore-dataexchange (>=2.5.0,<2.6.0)"] +datapipeline = ["types-aiobotocore-datapipeline (>=2.5.0,<2.6.0)"] +datasync = ["types-aiobotocore-datasync (>=2.5.0,<2.6.0)"] +dax = ["types-aiobotocore-dax (>=2.5.0,<2.6.0)"] +detective = ["types-aiobotocore-detective (>=2.5.0,<2.6.0)"] +devicefarm = ["types-aiobotocore-devicefarm (>=2.5.0,<2.6.0)"] +devops-guru = ["types-aiobotocore-devops-guru (>=2.5.0,<2.6.0)"] +directconnect = ["types-aiobotocore-directconnect (>=2.5.0,<2.6.0)"] +discovery = ["types-aiobotocore-discovery (>=2.5.0,<2.6.0)"] +dlm = ["types-aiobotocore-dlm (>=2.5.0,<2.6.0)"] +dms = ["types-aiobotocore-dms (>=2.5.0,<2.6.0)"] +docdb = ["types-aiobotocore-docdb (>=2.5.0,<2.6.0)"] +docdb-elastic = ["types-aiobotocore-docdb-elastic (>=2.5.0,<2.6.0)"] +drs = 
["types-aiobotocore-drs (>=2.5.0,<2.6.0)"] +ds = ["types-aiobotocore-ds (>=2.5.0,<2.6.0)"] +dynamodb = ["types-aiobotocore-dynamodb (>=2.5.0,<2.6.0)"] +dynamodbstreams = ["types-aiobotocore-dynamodbstreams (>=2.5.0,<2.6.0)"] +ebs = ["types-aiobotocore-ebs (>=2.5.0,<2.6.0)"] +ec2 = ["types-aiobotocore-ec2 (>=2.5.0,<2.6.0)"] +ec2-instance-connect = ["types-aiobotocore-ec2-instance-connect (>=2.5.0,<2.6.0)"] +ecr = ["types-aiobotocore-ecr (>=2.5.0,<2.6.0)"] +ecr-public = ["types-aiobotocore-ecr-public (>=2.5.0,<2.6.0)"] +ecs = ["types-aiobotocore-ecs (>=2.5.0,<2.6.0)"] +efs = ["types-aiobotocore-efs (>=2.5.0,<2.6.0)"] +eks = ["types-aiobotocore-eks (>=2.5.0,<2.6.0)"] +elastic-inference = ["types-aiobotocore-elastic-inference (>=2.5.0,<2.6.0)"] +elasticache = ["types-aiobotocore-elasticache (>=2.5.0,<2.6.0)"] +elasticbeanstalk = ["types-aiobotocore-elasticbeanstalk (>=2.5.0,<2.6.0)"] +elastictranscoder = ["types-aiobotocore-elastictranscoder (>=2.5.0,<2.6.0)"] +elb = ["types-aiobotocore-elb (>=2.5.0,<2.6.0)"] +elbv2 = ["types-aiobotocore-elbv2 (>=2.5.0,<2.6.0)"] +emr = ["types-aiobotocore-emr (>=2.5.0,<2.6.0)"] +emr-containers = ["types-aiobotocore-emr-containers (>=2.5.0,<2.6.0)"] +emr-serverless = ["types-aiobotocore-emr-serverless (>=2.5.0,<2.6.0)"] +es = ["types-aiobotocore-es (>=2.5.0,<2.6.0)"] +essential = ["types-aiobotocore-cloudformation (>=2.5.0,<2.6.0)", "types-aiobotocore-dynamodb (>=2.5.0,<2.6.0)", "types-aiobotocore-ec2 (>=2.5.0,<2.6.0)", "types-aiobotocore-lambda (>=2.5.0,<2.6.0)", "types-aiobotocore-rds (>=2.5.0,<2.6.0)", "types-aiobotocore-s3 (>=2.5.0,<2.6.0)", "types-aiobotocore-sqs (>=2.5.0,<2.6.0)"] +events = ["types-aiobotocore-events (>=2.5.0,<2.6.0)"] +evidently = ["types-aiobotocore-evidently (>=2.5.0,<2.6.0)"] +finspace = ["types-aiobotocore-finspace (>=2.5.0,<2.6.0)"] +finspace-data = ["types-aiobotocore-finspace-data (>=2.5.0,<2.6.0)"] +firehose = ["types-aiobotocore-firehose (>=2.5.0,<2.6.0)"] +fis = ["types-aiobotocore-fis (>=2.5.0,<2.6.0)"] +fms = ["types-aiobotocore-fms (>=2.5.0,<2.6.0)"] +forecast = ["types-aiobotocore-forecast (>=2.5.0,<2.6.0)"] +forecastquery = ["types-aiobotocore-forecastquery (>=2.5.0,<2.6.0)"] +frauddetector = ["types-aiobotocore-frauddetector (>=2.5.0,<2.6.0)"] +fsx = ["types-aiobotocore-fsx (>=2.5.0,<2.6.0)"] +gamelift = ["types-aiobotocore-gamelift (>=2.5.0,<2.6.0)"] +gamesparks = ["types-aiobotocore-gamesparks (>=2.5.0,<2.6.0)"] +glacier = ["types-aiobotocore-glacier (>=2.5.0,<2.6.0)"] +globalaccelerator = ["types-aiobotocore-globalaccelerator (>=2.5.0,<2.6.0)"] +glue = ["types-aiobotocore-glue (>=2.5.0,<2.6.0)"] +grafana = ["types-aiobotocore-grafana (>=2.5.0,<2.6.0)"] +greengrass = ["types-aiobotocore-greengrass (>=2.5.0,<2.6.0)"] +greengrassv2 = ["types-aiobotocore-greengrassv2 (>=2.5.0,<2.6.0)"] +groundstation = ["types-aiobotocore-groundstation (>=2.5.0,<2.6.0)"] +guardduty = ["types-aiobotocore-guardduty (>=2.5.0,<2.6.0)"] +health = ["types-aiobotocore-health (>=2.5.0,<2.6.0)"] +healthlake = ["types-aiobotocore-healthlake (>=2.5.0,<2.6.0)"] +honeycode = ["types-aiobotocore-honeycode (>=2.5.0,<2.6.0)"] +iam = ["types-aiobotocore-iam (>=2.5.0,<2.6.0)"] +identitystore = ["types-aiobotocore-identitystore (>=2.5.0,<2.6.0)"] +imagebuilder = ["types-aiobotocore-imagebuilder (>=2.5.0,<2.6.0)"] +importexport = ["types-aiobotocore-importexport (>=2.5.0,<2.6.0)"] +inspector = ["types-aiobotocore-inspector (>=2.5.0,<2.6.0)"] +inspector2 = ["types-aiobotocore-inspector2 (>=2.5.0,<2.6.0)"] +iot = ["types-aiobotocore-iot (>=2.5.0,<2.6.0)"] 
+iot-data = ["types-aiobotocore-iot-data (>=2.5.0,<2.6.0)"] +iot-jobs-data = ["types-aiobotocore-iot-jobs-data (>=2.5.0,<2.6.0)"] +iot-roborunner = ["types-aiobotocore-iot-roborunner (>=2.5.0,<2.6.0)"] +iot1click-devices = ["types-aiobotocore-iot1click-devices (>=2.5.0,<2.6.0)"] +iot1click-projects = ["types-aiobotocore-iot1click-projects (>=2.5.0,<2.6.0)"] +iotanalytics = ["types-aiobotocore-iotanalytics (>=2.5.0,<2.6.0)"] +iotdeviceadvisor = ["types-aiobotocore-iotdeviceadvisor (>=2.5.0,<2.6.0)"] +iotevents = ["types-aiobotocore-iotevents (>=2.5.0,<2.6.0)"] +iotevents-data = ["types-aiobotocore-iotevents-data (>=2.5.0,<2.6.0)"] +iotfleethub = ["types-aiobotocore-iotfleethub (>=2.5.0,<2.6.0)"] +iotfleetwise = ["types-aiobotocore-iotfleetwise (>=2.5.0,<2.6.0)"] +iotsecuretunneling = ["types-aiobotocore-iotsecuretunneling (>=2.5.0,<2.6.0)"] +iotsitewise = ["types-aiobotocore-iotsitewise (>=2.5.0,<2.6.0)"] +iotthingsgraph = ["types-aiobotocore-iotthingsgraph (>=2.5.0,<2.6.0)"] +iottwinmaker = ["types-aiobotocore-iottwinmaker (>=2.5.0,<2.6.0)"] +iotwireless = ["types-aiobotocore-iotwireless (>=2.5.0,<2.6.0)"] +ivs = ["types-aiobotocore-ivs (>=2.5.0,<2.6.0)"] +ivschat = ["types-aiobotocore-ivschat (>=2.5.0,<2.6.0)"] +kafka = ["types-aiobotocore-kafka (>=2.5.0,<2.6.0)"] +kafkaconnect = ["types-aiobotocore-kafkaconnect (>=2.5.0,<2.6.0)"] +kendra = ["types-aiobotocore-kendra (>=2.5.0,<2.6.0)"] +kendra-ranking = ["types-aiobotocore-kendra-ranking (>=2.5.0,<2.6.0)"] +keyspaces = ["types-aiobotocore-keyspaces (>=2.5.0,<2.6.0)"] +kinesis = ["types-aiobotocore-kinesis (>=2.5.0,<2.6.0)"] +kinesis-video-archived-media = ["types-aiobotocore-kinesis-video-archived-media (>=2.5.0,<2.6.0)"] +kinesis-video-media = ["types-aiobotocore-kinesis-video-media (>=2.5.0,<2.6.0)"] +kinesis-video-signaling = ["types-aiobotocore-kinesis-video-signaling (>=2.5.0,<2.6.0)"] +kinesis-video-webrtc-storage = ["types-aiobotocore-kinesis-video-webrtc-storage (>=2.5.0,<2.6.0)"] +kinesisanalytics = ["types-aiobotocore-kinesisanalytics (>=2.5.0,<2.6.0)"] +kinesisanalyticsv2 = ["types-aiobotocore-kinesisanalyticsv2 (>=2.5.0,<2.6.0)"] +kinesisvideo = ["types-aiobotocore-kinesisvideo (>=2.5.0,<2.6.0)"] +kms = ["types-aiobotocore-kms (>=2.5.0,<2.6.0)"] +lakeformation = ["types-aiobotocore-lakeformation (>=2.5.0,<2.6.0)"] +lambda = ["types-aiobotocore-lambda (>=2.5.0,<2.6.0)"] +lex-models = ["types-aiobotocore-lex-models (>=2.5.0,<2.6.0)"] +lex-runtime = ["types-aiobotocore-lex-runtime (>=2.5.0,<2.6.0)"] +lexv2-models = ["types-aiobotocore-lexv2-models (>=2.5.0,<2.6.0)"] +lexv2-runtime = ["types-aiobotocore-lexv2-runtime (>=2.5.0,<2.6.0)"] +license-manager = ["types-aiobotocore-license-manager (>=2.5.0,<2.6.0)"] +license-manager-linux-subscriptions = ["types-aiobotocore-license-manager-linux-subscriptions (>=2.5.0,<2.6.0)"] +license-manager-user-subscriptions = ["types-aiobotocore-license-manager-user-subscriptions (>=2.5.0,<2.6.0)"] +lightsail = ["types-aiobotocore-lightsail (>=2.5.0,<2.6.0)"] +location = ["types-aiobotocore-location (>=2.5.0,<2.6.0)"] +logs = ["types-aiobotocore-logs (>=2.5.0,<2.6.0)"] +lookoutequipment = ["types-aiobotocore-lookoutequipment (>=2.5.0,<2.6.0)"] +lookoutmetrics = ["types-aiobotocore-lookoutmetrics (>=2.5.0,<2.6.0)"] +lookoutvision = ["types-aiobotocore-lookoutvision (>=2.5.0,<2.6.0)"] +m2 = ["types-aiobotocore-m2 (>=2.5.0,<2.6.0)"] +machinelearning = ["types-aiobotocore-machinelearning (>=2.5.0,<2.6.0)"] +macie = ["types-aiobotocore-macie (>=2.5.0,<2.6.0)"] +macie2 = ["types-aiobotocore-macie2 
(>=2.5.0,<2.6.0)"] +managedblockchain = ["types-aiobotocore-managedblockchain (>=2.5.0,<2.6.0)"] +marketplace-catalog = ["types-aiobotocore-marketplace-catalog (>=2.5.0,<2.6.0)"] +marketplace-entitlement = ["types-aiobotocore-marketplace-entitlement (>=2.5.0,<2.6.0)"] +marketplacecommerceanalytics = ["types-aiobotocore-marketplacecommerceanalytics (>=2.5.0,<2.6.0)"] +mediaconnect = ["types-aiobotocore-mediaconnect (>=2.5.0,<2.6.0)"] +mediaconvert = ["types-aiobotocore-mediaconvert (>=2.5.0,<2.6.0)"] +medialive = ["types-aiobotocore-medialive (>=2.5.0,<2.6.0)"] +mediapackage = ["types-aiobotocore-mediapackage (>=2.5.0,<2.6.0)"] +mediapackage-vod = ["types-aiobotocore-mediapackage-vod (>=2.5.0,<2.6.0)"] +mediastore = ["types-aiobotocore-mediastore (>=2.5.0,<2.6.0)"] +mediastore-data = ["types-aiobotocore-mediastore-data (>=2.5.0,<2.6.0)"] +mediatailor = ["types-aiobotocore-mediatailor (>=2.5.0,<2.6.0)"] +memorydb = ["types-aiobotocore-memorydb (>=2.5.0,<2.6.0)"] +meteringmarketplace = ["types-aiobotocore-meteringmarketplace (>=2.5.0,<2.6.0)"] +mgh = ["types-aiobotocore-mgh (>=2.5.0,<2.6.0)"] +mgn = ["types-aiobotocore-mgn (>=2.5.0,<2.6.0)"] +migration-hub-refactor-spaces = ["types-aiobotocore-migration-hub-refactor-spaces (>=2.5.0,<2.6.0)"] +migrationhub-config = ["types-aiobotocore-migrationhub-config (>=2.5.0,<2.6.0)"] +migrationhuborchestrator = ["types-aiobotocore-migrationhuborchestrator (>=2.5.0,<2.6.0)"] +migrationhubstrategy = ["types-aiobotocore-migrationhubstrategy (>=2.5.0,<2.6.0)"] +mobile = ["types-aiobotocore-mobile (>=2.5.0,<2.6.0)"] +mq = ["types-aiobotocore-mq (>=2.5.0,<2.6.0)"] +mturk = ["types-aiobotocore-mturk (>=2.5.0,<2.6.0)"] +mwaa = ["types-aiobotocore-mwaa (>=2.5.0,<2.6.0)"] +neptune = ["types-aiobotocore-neptune (>=2.5.0,<2.6.0)"] +network-firewall = ["types-aiobotocore-network-firewall (>=2.5.0,<2.6.0)"] +networkmanager = ["types-aiobotocore-networkmanager (>=2.5.0,<2.6.0)"] +nimble = ["types-aiobotocore-nimble (>=2.5.0,<2.6.0)"] +oam = ["types-aiobotocore-oam (>=2.5.0,<2.6.0)"] +omics = ["types-aiobotocore-omics (>=2.5.0,<2.6.0)"] +opensearch = ["types-aiobotocore-opensearch (>=2.5.0,<2.6.0)"] +opensearchserverless = ["types-aiobotocore-opensearchserverless (>=2.5.0,<2.6.0)"] +opsworks = ["types-aiobotocore-opsworks (>=2.5.0,<2.6.0)"] +opsworkscm = ["types-aiobotocore-opsworkscm (>=2.5.0,<2.6.0)"] +organizations = ["types-aiobotocore-organizations (>=2.5.0,<2.6.0)"] +outposts = ["types-aiobotocore-outposts (>=2.5.0,<2.6.0)"] +panorama = ["types-aiobotocore-panorama (>=2.5.0,<2.6.0)"] +personalize = ["types-aiobotocore-personalize (>=2.5.0,<2.6.0)"] +personalize-events = ["types-aiobotocore-personalize-events (>=2.5.0,<2.6.0)"] +personalize-runtime = ["types-aiobotocore-personalize-runtime (>=2.5.0,<2.6.0)"] +pi = ["types-aiobotocore-pi (>=2.5.0,<2.6.0)"] +pinpoint = ["types-aiobotocore-pinpoint (>=2.5.0,<2.6.0)"] +pinpoint-email = ["types-aiobotocore-pinpoint-email (>=2.5.0,<2.6.0)"] +pinpoint-sms-voice = ["types-aiobotocore-pinpoint-sms-voice (>=2.5.0,<2.6.0)"] +pinpoint-sms-voice-v2 = ["types-aiobotocore-pinpoint-sms-voice-v2 (>=2.5.0,<2.6.0)"] +pipes = ["types-aiobotocore-pipes (>=2.5.0,<2.6.0)"] +polly = ["types-aiobotocore-polly (>=2.5.0,<2.6.0)"] +pricing = ["types-aiobotocore-pricing (>=2.5.0,<2.6.0)"] +privatenetworks = ["types-aiobotocore-privatenetworks (>=2.5.0,<2.6.0)"] +proton = ["types-aiobotocore-proton (>=2.5.0,<2.6.0)"] +qldb = ["types-aiobotocore-qldb (>=2.5.0,<2.6.0)"] +qldb-session = ["types-aiobotocore-qldb-session (>=2.5.0,<2.6.0)"] 
+quicksight = ["types-aiobotocore-quicksight (>=2.5.0,<2.6.0)"] +ram = ["types-aiobotocore-ram (>=2.5.0,<2.6.0)"] +rbin = ["types-aiobotocore-rbin (>=2.5.0,<2.6.0)"] +rds = ["types-aiobotocore-rds (>=2.5.0,<2.6.0)"] +rds-data = ["types-aiobotocore-rds-data (>=2.5.0,<2.6.0)"] +redshift = ["types-aiobotocore-redshift (>=2.5.0,<2.6.0)"] +redshift-data = ["types-aiobotocore-redshift-data (>=2.5.0,<2.6.0)"] +redshift-serverless = ["types-aiobotocore-redshift-serverless (>=2.5.0,<2.6.0)"] +rekognition = ["types-aiobotocore-rekognition (>=2.5.0,<2.6.0)"] +resiliencehub = ["types-aiobotocore-resiliencehub (>=2.5.0,<2.6.0)"] +resource-explorer-2 = ["types-aiobotocore-resource-explorer-2 (>=2.5.0,<2.6.0)"] +resource-groups = ["types-aiobotocore-resource-groups (>=2.5.0,<2.6.0)"] +resourcegroupstaggingapi = ["types-aiobotocore-resourcegroupstaggingapi (>=2.5.0,<2.6.0)"] +robomaker = ["types-aiobotocore-robomaker (>=2.5.0,<2.6.0)"] +rolesanywhere = ["types-aiobotocore-rolesanywhere (>=2.5.0,<2.6.0)"] +route53 = ["types-aiobotocore-route53 (>=2.5.0,<2.6.0)"] +route53-recovery-cluster = ["types-aiobotocore-route53-recovery-cluster (>=2.5.0,<2.6.0)"] +route53-recovery-control-config = ["types-aiobotocore-route53-recovery-control-config (>=2.5.0,<2.6.0)"] +route53-recovery-readiness = ["types-aiobotocore-route53-recovery-readiness (>=2.5.0,<2.6.0)"] +route53domains = ["types-aiobotocore-route53domains (>=2.5.0,<2.6.0)"] +route53resolver = ["types-aiobotocore-route53resolver (>=2.5.0,<2.6.0)"] +rum = ["types-aiobotocore-rum (>=2.5.0,<2.6.0)"] +s3 = ["types-aiobotocore-s3 (>=2.5.0,<2.6.0)"] +s3control = ["types-aiobotocore-s3control (>=2.5.0,<2.6.0)"] +s3outposts = ["types-aiobotocore-s3outposts (>=2.5.0,<2.6.0)"] +sagemaker = ["types-aiobotocore-sagemaker (>=2.5.0,<2.6.0)"] +sagemaker-a2i-runtime = ["types-aiobotocore-sagemaker-a2i-runtime (>=2.5.0,<2.6.0)"] +sagemaker-edge = ["types-aiobotocore-sagemaker-edge (>=2.5.0,<2.6.0)"] +sagemaker-featurestore-runtime = ["types-aiobotocore-sagemaker-featurestore-runtime (>=2.5.0,<2.6.0)"] +sagemaker-geospatial = ["types-aiobotocore-sagemaker-geospatial (>=2.5.0,<2.6.0)"] +sagemaker-metrics = ["types-aiobotocore-sagemaker-metrics (>=2.5.0,<2.6.0)"] +sagemaker-runtime = ["types-aiobotocore-sagemaker-runtime (>=2.5.0,<2.6.0)"] +savingsplans = ["types-aiobotocore-savingsplans (>=2.5.0,<2.6.0)"] +scheduler = ["types-aiobotocore-scheduler (>=2.5.0,<2.6.0)"] +schemas = ["types-aiobotocore-schemas (>=2.5.0,<2.6.0)"] +sdb = ["types-aiobotocore-sdb (>=2.5.0,<2.6.0)"] +secretsmanager = ["types-aiobotocore-secretsmanager (>=2.5.0,<2.6.0)"] +securityhub = ["types-aiobotocore-securityhub (>=2.5.0,<2.6.0)"] +securitylake = ["types-aiobotocore-securitylake (>=2.5.0,<2.6.0)"] +serverlessrepo = ["types-aiobotocore-serverlessrepo (>=2.5.0,<2.6.0)"] +service-quotas = ["types-aiobotocore-service-quotas (>=2.5.0,<2.6.0)"] +servicecatalog = ["types-aiobotocore-servicecatalog (>=2.5.0,<2.6.0)"] +servicecatalog-appregistry = ["types-aiobotocore-servicecatalog-appregistry (>=2.5.0,<2.6.0)"] +servicediscovery = ["types-aiobotocore-servicediscovery (>=2.5.0,<2.6.0)"] +ses = ["types-aiobotocore-ses (>=2.5.0,<2.6.0)"] +sesv2 = ["types-aiobotocore-sesv2 (>=2.5.0,<2.6.0)"] +shield = ["types-aiobotocore-shield (>=2.5.0,<2.6.0)"] +signer = ["types-aiobotocore-signer (>=2.5.0,<2.6.0)"] +simspaceweaver = ["types-aiobotocore-simspaceweaver (>=2.5.0,<2.6.0)"] +sms = ["types-aiobotocore-sms (>=2.5.0,<2.6.0)"] +sms-voice = ["types-aiobotocore-sms-voice (>=2.5.0,<2.6.0)"] +snow-device-management = 
["types-aiobotocore-snow-device-management (>=2.5.0,<2.6.0)"] +snowball = ["types-aiobotocore-snowball (>=2.5.0,<2.6.0)"] +sns = ["types-aiobotocore-sns (>=2.5.0,<2.6.0)"] +sqs = ["types-aiobotocore-sqs (>=2.5.0,<2.6.0)"] +ssm = ["types-aiobotocore-ssm (>=2.5.0,<2.6.0)"] +ssm-contacts = ["types-aiobotocore-ssm-contacts (>=2.5.0,<2.6.0)"] +ssm-incidents = ["types-aiobotocore-ssm-incidents (>=2.5.0,<2.6.0)"] +ssm-sap = ["types-aiobotocore-ssm-sap (>=2.5.0,<2.6.0)"] +sso = ["types-aiobotocore-sso (>=2.5.0,<2.6.0)"] +sso-admin = ["types-aiobotocore-sso-admin (>=2.5.0,<2.6.0)"] +sso-oidc = ["types-aiobotocore-sso-oidc (>=2.5.0,<2.6.0)"] +stepfunctions = ["types-aiobotocore-stepfunctions (>=2.5.0,<2.6.0)"] +storagegateway = ["types-aiobotocore-storagegateway (>=2.5.0,<2.6.0)"] +sts = ["types-aiobotocore-sts (>=2.5.0,<2.6.0)"] +support = ["types-aiobotocore-support (>=2.5.0,<2.6.0)"] +support-app = ["types-aiobotocore-support-app (>=2.5.0,<2.6.0)"] +swf = ["types-aiobotocore-swf (>=2.5.0,<2.6.0)"] +synthetics = ["types-aiobotocore-synthetics (>=2.5.0,<2.6.0)"] +textract = ["types-aiobotocore-textract (>=2.5.0,<2.6.0)"] +timestream-query = ["types-aiobotocore-timestream-query (>=2.5.0,<2.6.0)"] +timestream-write = ["types-aiobotocore-timestream-write (>=2.5.0,<2.6.0)"] +tnb = ["types-aiobotocore-tnb (>=2.5.0,<2.6.0)"] +transcribe = ["types-aiobotocore-transcribe (>=2.5.0,<2.6.0)"] +transfer = ["types-aiobotocore-transfer (>=2.5.0,<2.6.0)"] +translate = ["types-aiobotocore-translate (>=2.5.0,<2.6.0)"] +voice-id = ["types-aiobotocore-voice-id (>=2.5.0,<2.6.0)"] +waf = ["types-aiobotocore-waf (>=2.5.0,<2.6.0)"] +waf-regional = ["types-aiobotocore-waf-regional (>=2.5.0,<2.6.0)"] +wafv2 = ["types-aiobotocore-wafv2 (>=2.5.0,<2.6.0)"] +wellarchitected = ["types-aiobotocore-wellarchitected (>=2.5.0,<2.6.0)"] +wisdom = ["types-aiobotocore-wisdom (>=2.5.0,<2.6.0)"] +workdocs = ["types-aiobotocore-workdocs (>=2.5.0,<2.6.0)"] +worklink = ["types-aiobotocore-worklink (>=2.5.0,<2.6.0)"] +workmail = ["types-aiobotocore-workmail (>=2.5.0,<2.6.0)"] +workmailmessageflow = ["types-aiobotocore-workmailmessageflow (>=2.5.0,<2.6.0)"] +workspaces = ["types-aiobotocore-workspaces (>=2.5.0,<2.6.0)"] +workspaces-web = ["types-aiobotocore-workspaces-web (>=2.5.0,<2.6.0)"] +xray = ["types-aiobotocore-xray (>=2.5.0,<2.6.0)"] + +[[package]] +name = "types-aiobotocore-apigateway" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.APIGateway 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-apigateway-2.5.0.post1.tar.gz", hash = "sha256:3ed991ec693900bf1b38a7c36854b60252e54a741d00362703b9fcc461bf0fc7"}, + {file = "types_aiobotocore_apigateway-2.5.0.post1-py3-none-any.whl", hash = "sha256:03dd241213cceb52474631d76b58776fafb2ea86d28d2b7760ccd13ff8e844b2"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-apigatewayv2" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.ApiGatewayV2 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-apigatewayv2-2.5.0.post1.tar.gz", hash = "sha256:8f28629d4130a69ff4fa17d8850fcb458f1f7fce335f6763d413b3a1787c9288"}, + {file = "types_aiobotocore_apigatewayv2-2.5.0.post1-py3-none-any.whl", hash = "sha256:967e2492bcb322023539c0ceaf490bb49a2820e8074d76000c487d4024da5b08"}, +] + 
+[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-ec2" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.EC2 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-ec2-2.5.0.post1.tar.gz", hash = "sha256:b652f8e65c603af93db9bcc321f86a9e7d9bea79377386a1558850557eaf5288"}, + {file = "types_aiobotocore_ec2-2.5.0.post1-py3-none-any.whl", hash = "sha256:72663c2e8f2f05e9255d6416201d44f32ccc28237f0ea352447dff61ced356be"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-ecs" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.ECS 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-ecs-2.5.0.post1.tar.gz", hash = "sha256:5d2bd2f616ea329dee7d430b8ff23dc574e2bc1d060dc0f67a860d0b8b6bf235"}, + {file = "types_aiobotocore_ecs-2.5.0.post1-py3-none-any.whl", hash = "sha256:baf54d8b5f38d068e73e459da6d26cb14746ffe17eccc478e1b956b0ae90ef0a"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-elb" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.ElasticLoadBalancing 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-elb-2.5.0.post1.tar.gz", hash = "sha256:77402ebbb489b88e9fc2579b65d8cbab39b70ae2ed25417e94e0dd57a8cbe00e"}, + {file = "types_aiobotocore_elb-2.5.0.post1-py3-none-any.whl", hash = "sha256:1e8f3a429e6505b2eb2c92948a7efd62fc643eaa9282a72aa5be3dee3545ee22"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-elbv2" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.ElasticLoadBalancingv2 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-elbv2-2.5.0.post1.tar.gz", hash = "sha256:97fbf6ea4592acd47ad3719f0244c67c68a4f61e15498eb47d3f40b5dba02256"}, + {file = "types_aiobotocore_elbv2-2.5.0.post1-py3-none-any.whl", hash = "sha256:3e657ccfdabb30a3258ad114ad1a3205298261cbbb7852a6ca948550b44f2bf4"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-rds" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.RDS 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-rds-2.5.0.post1.tar.gz", hash = "sha256:25c7b6d4243ce9aa2c2f67df74e864dc8842ce4a93ffda3c949c79a116b3a577"}, + {file = "types_aiobotocore_rds-2.5.0.post1-py3-none-any.whl", hash = "sha256:a136135fd122f4b7462921ede0e3c27045be3f9265ddea8ff2354242de688cd0"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-route53" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.Route53 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-route53-2.5.0.post1.tar.gz", hash = "sha256:1c1a882ed07ebbf57e0695c8f87b6da5007c7d803f12b1de391f3d4e032064e1"}, + {file = 
"types_aiobotocore_route53-2.5.0.post1-py3-none-any.whl", hash = "sha256:e598477399aa3ffcccf0d4c9088010f3db42222201976d8d00fdd1bb3ccb5df7"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-s3" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.S3 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-s3-2.5.0.post1.tar.gz", hash = "sha256:7421293754e8c6202b1f29618d90ae6646a9e4551985d2645638394a20a600aa"}, + {file = "types_aiobotocore_s3-2.5.0.post1-py3-none-any.whl", hash = "sha256:29eeab8008486bd0d3876985cbcb8a3f1d998556d24ae619ab81b9f2c711c673"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "types-aiobotocore-sts" +version = "2.5.0.post1" +description = "Type annotations for aiobotocore.STS 2.5.0 service generated with mypy-boto3-builder 7.13.0" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-aiobotocore-sts-2.5.0.post1.tar.gz", hash = "sha256:e95cbce20a013e378584c218f254f160dbf4fbbc125a0f6b5f0223d8bf800c89"}, + {file = "types_aiobotocore_sts-2.5.0.post1-py3-none-any.whl", hash = "sha256:64b5b79de870f680d23b6561d1bef34985d870e67c267ad603d351f9ba54d323"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "types-awscrt" version = "0.16.17" @@ -5959,21 +5953,6 @@ files = [ [package.dependencies] types-urllib3 = "*" -[[package]] -name = "types-s3transfer" -version = "0.6.1" -description = "Type annotations and code completion for s3transfer" -category = "dev" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "types_s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:6d1ac1dedac750d570428362acdf60fdd4f277b0788855c3894d3226756b2bfb"}, - {file = "types_s3transfer-0.6.1.tar.gz", hash = "sha256:75ac1d7143d58c1e6af467cfd4a96c67ee058a3adf7c249d9309999e1f5f41e4"}, -] - -[package.dependencies] -types-awscrt = "*" - [[package]] name = "types-urllib3" version = "1.26.25.12" @@ -6288,4 +6267,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "5faff16b7efedb18460d56b8f53efd523a392b2594b9560c0bd82084b9cb755c" +content-hash = "1d982cf938d75518b3749fc3101dbe1f2f414f185b3137d15d3bb6e48c8e06e3" diff --git a/pyproject.toml b/pyproject.toml index cbcc93e..6228ace 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,9 +97,8 @@ sphinx-design = "^0.3.0" sphinxcontrib-asciinema = "^0.3.6" [tool.poetry.group.aws.dependencies] -boto3 = "^1.24.63" -boto3-stubs = {extras = ["apigateway", "apigatewayv2", "ec2", "ecs", "elb", "elbv2", "rds", "route53", "route53domains", "s3", "sts"], version = "^1.24.63"} aioboto3 = "^11.2.0" +types-aiobotocore = {extras = ["apigateway", "apigatewayv2", "ec2", "ecs", "elb", "elbv2", "rds", "route53", "s3", "sts"], version = "^2.5.0.post2"} [tool.poetry.group.azure.dependencies] azure-cli = "^2.48.1" diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index 65b3d84..4c7a16a 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -1,19 +1,19 @@ """AWS Cloud Connector.""" import contextlib -from collections.abc import Sequence -from typing import AsyncGenerator, List, Optional +from collections.abc import AsyncGenerator, 
Sequence +from typing import List, Optional from aiobotocore.session import get_session from botocore.exceptions import ClientError -from mypy_boto3_ec2.type_defs import ( +from types_aiobotocore_apigateway.client import APIGatewayClient +from types_aiobotocore_apigatewayv2.client import ApiGatewayV2Client +from types_aiobotocore_ec2.client import EC2Client +from types_aiobotocore_ec2.type_defs import ( FilterTypeDef, NetworkInterfaceTypeDef, TagDescriptionTypeDef, TagTypeDef, ) -from types_aiobotocore_apigateway.client import APIGatewayClient -from types_aiobotocore_apigatewayv2.client import ApiGatewayV2Client -from types_aiobotocore_ec2.client import EC2Client from types_aiobotocore_ecs.client import ECSClient from types_aiobotocore_elb.client import ElasticLoadBalancingClient from types_aiobotocore_elbv2.client import ElasticLoadBalancingv2Client @@ -49,7 +49,7 @@ class AwsCloudConnector(CloudConnector): provider_settings: AwsSpecificSettings account_number: str - ignore_tags: List[str] + ignore_tags: list[str] def __init__(self, settings: Settings): """Initialize AWS Cloud Connectors. @@ -70,7 +70,7 @@ def __init__(self, settings: Settings): AwsResourceTypes.STORAGE_BUCKET: self.get_s3_instances, } - self.ignored_tags: List[str] = [] + self.ignored_tags: list[str] = [] self.global_ignored_tags: set[str] = set(IGNORED_TAGS) async def scan( @@ -107,73 +107,43 @@ async def scan_all(self): ) # type: ignore for provider_setting in provider_settings.values(): - if provider_setting.accounts: - for account in provider_setting.accounts: + accounts = provider_setting.accounts + if not accounts: + accounts = [None] + for account in accounts: + if account is not None: self.account_number = account.account_number self.ignored_tags = self.get_ignored_tags(account.ignore_tags) + else: + self.account_number = provider_setting.account_number + self.ignored_tags = self.get_ignored_tags( + provider_setting.ignore_tags + ) - for region in provider_setting.regions: - try: - with Healthcheck( - self.settings, + for region in provider_setting.regions: + try: + with Healthcheck( + self.settings, + provider_setting, + provider={ + "region": region, + "account_number": self.account_number, + }, + ): + credentials = await get_aws_credentials( + provider_setting, account, region + ) + await self.scan( provider_setting, - provider={ - "region": region, - "account_number": self.account_number, - }, - ): - credentials = await get_aws_credentials( - provider_setting, account, region - ) - await self.scan( - provider_setting, - credentials, - region, - ignored_tags=self.ignored_tags, - ) - except Exception as e: - self.logger.error( - f"Unable to scan account {self.account_number} in region {region}. 
Error: {e}" + credentials, + region, + ignored_tags=self.ignored_tags, ) - self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) - - else: - self.account_number = provider_setting.account_number - self.ignored_tags = self.get_ignored_tags(provider_setting.ignore_tags) - - for region in provider_setting.regions: - credentials = await get_aws_credentials( - provider_setting, None, region - ) - await self.scan( - provider_setting, - credentials, - region, - ignored_tags=self.ignored_tags, - ) - # try: - # with Healthcheck( - # self.settings, - # provider_setting, - # provider={ - # "region": region, - # "account_number": self.account_number, - # }, - # ): - # credentials = await get_aws_credentials( - # provider_setting, None, region - # ) - # await self.scan( - # provider_setting, - # credentials, - # region, - # ignored_tags=self.ignored_tags, - # ) - # except Exception as e: - # self.logger.error( - # f"Unable to scan account {self.account_number} in region {region}. Error: {e}" - # ) - # self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) + except Exception as e: + self.logger.error( + f"Unable to scan account {self.account_number} in region {region}. Error: {e}" + ) + self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) def format_label(self, service: SeedLabel, region: Optional[str] = None) -> str: """Format AWS label. diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index ab252ce..44b2eb5 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -1,5 +1,6 @@ """Azure Cloud Connector.""" -from typing import Any, AsyncGenerator, Optional +from collections.abc import AsyncGenerator +from typing import Any, Optional from azure.core.exceptions import ( ClientAuthenticationError, diff --git a/src/censys/cloud_connectors/common/connector.py b/src/censys/cloud_connectors/common/connector.py index 47a9fd4..5cb5543 100644 --- a/src/censys/cloud_connectors/common/connector.py +++ b/src/censys/cloud_connectors/common/connector.py @@ -1,10 +1,11 @@ """Base class for all cloud connectors.""" from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Coroutine from enum import Enum from functools import partial from logging import Logger -from typing import Any, Callable, Coroutine, Optional, Union +from typing import Any, Callable, Optional, Union import aiometer from requests.exceptions import JSONDecodeError diff --git a/src/censys/cloud_connectors/common/plugins/registry.py b/src/censys/cloud_connectors/common/plugins/registry.py index 928bf01..38767e9 100644 --- a/src/censys/cloud_connectors/common/plugins/registry.py +++ b/src/censys/cloud_connectors/common/plugins/registry.py @@ -1,4 +1,5 @@ """Cloud Connector Plugin Registry.""" +import asyncio from enum import Enum from logging import Logger from pathlib import Path @@ -183,4 +184,7 @@ def dispatch_event( provider=context.get("provider"), service=context.get("service"), ): - handler(context, **kwargs) + if asyncio.iscoroutinefunction(handler): + asyncio.create_task(handler(context, **kwargs)) + else: + handler(context, **kwargs) diff --git a/src/censys/cloud_connectors/plugins/aws_tags.py b/src/censys/cloud_connectors/plugins/aws_tags.py index cc1cf0c..0a65930 100644 --- a/src/censys/cloud_connectors/plugins/aws_tags.py +++ b/src/censys/cloud_connectors/plugins/aws_tags.py @@ -1,14 +1,14 @@ """AWS Tags Cloud Connector 
Plugin.""" import contextlib import urllib.parse -from typing import Callable, Optional +from typing import Any, Callable, Optional from botocore.exceptions import ClientError -from mypy_boto3_elb import ElasticLoadBalancingClient -from mypy_boto3_elbv2 import ElasticLoadBalancingv2Client -from mypy_boto3_route53 import Route53Client -from mypy_boto3_s3 import S3Client from requests import HTTPError +from types_aiobotocore_elb.client import ElasticLoadBalancingClient +from types_aiobotocore_elbv2.client import ElasticLoadBalancingv2Client +from types_aiobotocore_route53.client import Route53Client +from types_aiobotocore_s3.client import S3Client from censys.asm import AsmClient from censys.common.exceptions import ( @@ -87,7 +87,7 @@ def get_client(self, context: EventContext) -> AsmClient: ) return self.client - def on_add_seed( + async def on_add_seed( self, context: EventContext, seed: Optional[Seed] = None, **kwargs ) -> None: """On add seed. @@ -110,7 +110,7 @@ def on_add_seed( service: Optional[AwsResourceTypes] = context.get("service") # type: ignore if service in tag_retrieval_handlers: try: - tag_retrieval_handlers[service](context, seed, **kwargs) + await tag_retrieval_handlers[service](context, seed, **kwargs) except CensysAsmException: pass except Exception as e: @@ -168,7 +168,9 @@ def format_tag_set_as_string(self, tag_set: dict) -> str: """ return f"{tag_set['Key']}: {tag_set['Value']}" - def add_ip_tags(self, context: EventContext, seed: IpSeed, tag_set: dict) -> None: + def add_ip_tags( + self, context: EventContext, seed: IpSeed, tag_set: list[dict] + ) -> None: """Add IP tags. Args: @@ -225,7 +227,7 @@ def add_subdomain_tag(self, domain_name: str, tag: str) -> None: self._add_subdomain_tag(base_domain, domain_name, tag) def add_domain_tags( - self, context: EventContext, seed: DomainSeed, tag_set: list[dict] + self, context: EventContext, seed: DomainSeed, tag_set: list[Any] ): """Add domain tags. @@ -243,7 +245,7 @@ def add_domain_tags( self.add_subdomain_tag(str(seed.value), tag_string) def add_cloud_asset_tags( - self, context: EventContext, cloud_asset: CloudAsset, tag_set: list[dict] + self, context: EventContext, cloud_asset: CloudAsset, tag_set: list[Any] ): """Add cloud asset tags. @@ -290,7 +292,7 @@ def _get_api_gateway_tags( self.add_domain_tags(context, seed, tag_set) - def _get_load_balancer_tags( + async def _get_load_balancer_tags( self, context: EventContext, seed: DomainSeed, **kwargs ) -> None: """Get Load Balancer tags. @@ -301,23 +303,25 @@ def _get_load_balancer_tags( kwargs: Additional event data. 
""" elb_res: Optional[dict] = kwargs.get("elb_res") - aws_client = kwargs.get("aws_client") + aws_client = kwargs.get("aws_client") # type: ignore[assignment] if not elb_res or not aws_client: return tag_set = None if load_balancer_arn := elb_res.get("LoadBalancerArn"): # V2 Load Balancer - aws_client: ElasticLoadBalancingv2Client = aws_client # type: ignore - tag_set = aws_client.describe_tags(ResourceArns=[load_balancer_arn])[ - "TagDescriptions" - ][0]["Tags"] + elbv2_client: ElasticLoadBalancingv2Client = aws_client + elbv2_tag_response = await elbv2_client.describe_tags( + ResourceArns=[load_balancer_arn] + ) + tag_set = elbv2_tag_response["TagDescriptions"][0].get("Tags", []) elif load_balancer_name := elb_res.get("LoadBalancerName"): # V1 Load Balancer - aws_client: ElasticLoadBalancingClient = aws_client # type: ignore - tag_set = aws_client.describe_tags(LoadBalancerNames=[load_balancer_name])[ - "TagDescriptions" - ][0]["Tags"] + elbv1_client: ElasticLoadBalancingClient = aws_client + elbv1_tag_response = await elbv1_client.describe_tags( + LoadBalancerNames=[load_balancer_name] + ) + tag_set = elbv1_tag_response["TagDescriptions"][0].get("Tags", []) if not tag_set: return @@ -363,7 +367,7 @@ def _get_rds_tags( self.add_domain_tags(context, seed, tags) - def _get_route53_tags( + async def _get_route53_tags( self, context: EventContext, seed: DomainSeed, **kwargs ) -> None: """Get Route53 tags. @@ -383,14 +387,19 @@ def _get_route53_tags( resource_id = route53_zone_res["Id"] if resource_id.startswith("/hostedzone/"): resource_id = resource_id.split("/hostedzone/")[1] - pre_processed_tags = client.list_tags_for_resource( + tag_response = await client.list_tags_for_resource( ResourceType="hostedzone", ResourceId=resource_id - )["ResourceTagSet"]["Tags"] + ) + pre_processed_tags = tag_response["ResourceTagSet"].get("Tags", []) if not pre_processed_tags: return - tags = {tag["Key"]: tag["Value"] for tag in pre_processed_tags} + tags = { + tag_key: tag_value + for tag in pre_processed_tags + if tag and (tag_key := tag.get("Key")) and (tag_value := tag.get("Value")) + } self.add_domain_tags(context, seed, tags) # type: ignore @@ -413,7 +422,7 @@ def _get_ecs_tags(self, context: EventContext, seed: DomainSeed, **kwargs) -> No self.add_domain_tags(context, seed, tag_set) - def _get_storage_bucket_tags( + async def _get_storage_bucket_tags( self, context: EventContext, cloud_asset: AwsStorageBucketAsset, **kwargs ) -> None: """Get S3 tags. 
@@ -429,7 +438,8 @@ def _get_storage_bucket_tags( return try: - tag_set = client.get_bucket_tagging(Bucket=bucket_name).get("TagSet", []) + tag_response = await client.get_bucket_tagging(Bucket=bucket_name) + tag_set = tag_response.get("TagSet", []) self.add_cloud_asset_tags(context, cloud_asset, tag_set) # type: ignore except ClientError: pass diff --git a/tests/test_aws_connector.py b/tests/test_aws_connector.py index c3f0e2d..b805eac 100644 --- a/tests/test_aws_connector.py +++ b/tests/test_aws_connector.py @@ -515,7 +515,7 @@ def test_get_s3_region_has_no_region(self): data = {"LocationConstraint": None} bucket_name = "test-bucket-1" - mock_client = self.mocker.patch("mypy_boto3_s3.client.S3Client", autospec=True) + mock_client = self.mocker.patch("types_aiobotocore_s3.client.S3Client", autospec=True) mock_bucket_location = self.mocker.patch.object( mock_client, "get_bucket_location", return_value=data ) From 971123062fc9aa603c7ef5336942088bc721af4d Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Tue, 20 Jun 2023 17:49:33 -0400 Subject: [PATCH 06/19] tests: update for async --- .vscode/settings.json | 3 + poetry.lock | 33 +- pyproject.toml | 3 + .../aws_connector/connector.py | 240 +------ .../aws_connector/credentials.py | 28 +- .../aws_connector/settings.py | 62 +- .../azure_connector/connector.py | 29 +- .../cloud_connectors/common/connector.py | 10 + .../gcp_connector/connector.py | 2 +- tests/base_connector_case.py | 7 - tests/test_aws_connector.py | 631 ++++++++++-------- tests/test_aws_credentials.py | 353 ++++++++++ tests/test_aws_settings.py | 146 +--- tests/test_azure_connector.py | 268 +++++--- tests/test_cli.py | 17 +- tests/test_connector.py | 53 +- tests/test_gcp_connector.py | 235 ++++--- tests/test_settings.py | 13 +- 18 files changed, 1230 insertions(+), 903 deletions(-) create mode 100644 tests/test_aws_credentials.py diff --git a/.vscode/settings.json b/.vscode/settings.json index 4528ffe..0bb27d9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,6 +6,8 @@ "python.linting.enabled": true, "python.linting.flake8Enabled": true, "python.linting.flake8Path": "${workspaceFolder}/.venv/bin/flake8", + "mypy-type-checker.path": ["${workspaceFolder}/.venv/bin/mypy"], + "mypy-type-checker.importStrategy": "fromEnvironment", "python.poetryPath": "poetry", "python.terminal.activateEnvironment": true, "python.testing.pytestArgs": ["--no-cov"], @@ -17,6 +19,7 @@ "cSpell.words": [ "aiobotocore", "apigatewayv", + "asynctest", "autoupdate", "autouse", "aws", diff --git a/poetry.lock b/poetry.lock index 228a994..e19255e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -341,6 +341,18 @@ files = [ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -4647,6 +4659,25 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", 
"xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.21.0" +description = "Pytest support for asyncio" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, + {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + [[package]] name = "pytest-cov" version = "3.0.0" @@ -6267,4 +6298,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "1d982cf938d75518b3749fc3101dbe1f2f414f185b3137d15d3bb6e48c8e06e3" +content-hash = "75674c49d099b459b002225e4bae79a1d02e09dc588851359bc205087cde3f6d" diff --git a/pyproject.toml b/pyproject.toml index 6228ace..e9f9d88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,8 +75,10 @@ mypy = "^0.942" types-PyYAML = "^6.0.5" types-requests = "^2.27.14" # Testing +asynctest = "^0.13.0" parameterized = "^0.8.1" pytest = "^7.1.1" +pytest-asyncio = "^0.21.0" pytest-cov = "^3.0.0" pytest-datadir = "^1.3.1" pytest-mock = "^3.7.0" @@ -151,6 +153,7 @@ module = [ "importlib_metadata", # Tests "parameterized", + "asynctest", # GCP "google.oauth2", # Azure diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index 4c7a16a..a2a8d80 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -1,7 +1,7 @@ """AWS Cloud Connector.""" import contextlib from collections.abc import AsyncGenerator, Sequence -from typing import List, Optional +from typing import Optional from aiobotocore.session import get_session from botocore.exceptions import ClientError @@ -109,17 +109,34 @@ async def scan_all(self): for provider_setting in provider_settings.values(): accounts = provider_setting.accounts if not accounts: + # If no accounts are configured, scan the default account accounts = [None] + else: + # Scan the default account first, then scan the rest + accounts = [None, *accounts] + + # Scan each account in the provider for account in accounts: + + # Use the account number from the account if it is configured if account is not None: self.account_number = account.account_number - self.ignored_tags = self.get_ignored_tags(account.ignore_tags) + self.ignored_tags = ( + self.get_ignored_tags(account.ignore_tags) + if account.ignore_tags + else self.get_ignored_tags(provider_setting.ignore_tags) + ) + + # Use the account number from the provider if it is not configured else: self.account_number = provider_setting.account_number self.ignored_tags = self.get_ignored_tags( provider_setting.ignore_tags ) + # TODO: Add support for global services + + # Scan each region in the account for region in provider_setting.regions: try: with Healthcheck( @@ -130,9 +147,12 @@ async def scan_all(self): "account_number": self.account_number, }, ): + # Get credentials for the account credentials = await get_aws_credentials( provider_setting, account, region ) + + # Scan the account await self.scan( provider_setting, credentials, @@ -155,173 +175,9 @@ def format_label(self, service: 
SeedLabel, region: Optional[str] = None) -> str: Returns: str: Formatted label. """ - region_label = f"/{region}" if region != "" else "" + region_label = f"/{region}" if region else "" return f"AWS: {service} - {self.account_number}{region_label}" - # async def credentials(self) -> dict: - # """Generate required credentials for AWS. - - # This method will attempt to use any active STS sessions before falling - # back on the regular provider settings. - - # Returns: - # dict: Boto Credential format. - # """ - # # Role name is the credential field which causes STS to activate. - # # Once activated the temporary STS creds will be used by all - # # subsequent AWS service client calls. - # if role_name := self.credential.get("role_name"): - # self.logger.debug(f"Using STS for role {role_name}") - # return await self.get_assume_role_credentials(role_name) - - # self.logger.debug("Using provider settings credentials") - # return self.boto_cred( - # self.region, - # self.provider_settings.access_key, - # self.provider_settings.secret_key, - # self.provider_settings.session_token, - # ) - - # async def get_aws_client_kwargs( - # self, service: AwsServices, credentials: Optional[dict] = None - # ) -> dict: - # """Creates an AWS client for the provided service. - - # Args: - # service (AwsServices): The AWS service name. - # credentials (AwsCredentials): Override credentials instead of using the default. - - # Raises: - # Exception: If the client could not be created. - - # Returns: - # dict: An AWS boto3 client. - # """ - # try: - # credentials = credentials or await self.credentials() - # if credentials.get("aws_access_key_id"): - # self.logger.debug(f"AWS Service {service} using access key credentials") - # return credentials - - # # calling client without credentials follows the standard - # # credential import path to source creds from the environment - # self.logger.debug( - # f"AWS Service {service} using external boto configuration" - # ) - # return {} # type: ignore - # except Exception as e: - # self.logger.error( - # f"Could not connect with client type '{service}'. Error: {e}" - # ) - # raise - - # async def get_assume_role_credentials(self, role_name: str) -> dict: - # """Acquire temporary STS credentials and cache them for the duration of the scan. - - # Args: - # role_name (str): Role name. - - # Returns: - # dict: STS credentials. - - # Raises: - # Exception: If the credentials could not be created. - # """ - # if self.temp_sts_cred: - # self.logger.debug("Using cached temporary STS credentials") - # else: - # try: - # temp_creds = await self.assume_role(role_name) - # self.temp_sts_cred = self.boto_cred( - # self.region, - # temp_creds["AccessKeyId"], - # temp_creds["SecretAccessKey"], - # temp_creds["SessionToken"], - # ) - # self.logger.debug( - # f"Created temporary STS credentials for role {role_name}" - # ) - # except Exception as e: - # self.logger.error(f"Failed to assume role: {e}") - # raise - - # return self.temp_sts_cred - - # def boto_cred( - # self, - # region_name: Optional[str] = None, - # access_key: Optional[str] = None, - # secret_key: Optional[str] = None, - # session_token: Optional[str] = None, - # ) -> dict[str, Any]: - # """Create a boto3 credential dict. Only params with values are included. - - # Args: - # region_name (str): AWS region. - # access_key (str): AWS access key. - # secret_key (str): AWS secret key. - # session_token (str): AWS session token. - - # Returns: - # dict: boto3 credential dict. 
- # """ - # cred = {} - - # if region_name: - # cred["region_name"] = region_name - - # if access_key: - # cred["aws_access_key_id"] = access_key - - # if secret_key: - # cred["aws_secret_access_key"] = secret_key - - # if session_token: - # cred["aws_session_token"] = session_token - - # return cred - - # async def assume_role( - # self, role_name: str = AwsDefaults.ROLE_NAME.value - # ) -> CredentialsTypeDef: - # """Acquire temporary credentials generated by Secure Token Service (STS). - - # This will always use the primary AWS account credentials when querying - # the STS service. - - # Args: - # role_name (str): Role name to assume. Defaults to "CensysCloudConnectorRole". - - # Returns: - # CredentialsTypeDef: Temporary credentials. - # """ - # credentials = self.boto_cred( - # self.region, - # self.provider_settings.access_key, - # self.provider_settings.secret_key, - # self.provider_settings.session_token, - # ) - - # # pass in explicit boto creds to force a new STS session - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.SECURE_TOKEN_SERVICE, # type: ignore - # credentials=credentials, - # ) - # async with get_session().create_client("sts", **aws_kwargs) as client: # type: ignore - # client: STSClient # type: ignore[no-redef] - # role: dict[str, Any] = { - # "RoleArn": f"arn:aws:iam::{self.account_number}:role/{role_name}", - # "RoleSessionName": self.credential["role_session_name"] - # or AwsDefaults.ROLE_SESSION_NAME.value, - # } - - # temp_creds = await client.assume_role(**role) - - # self.logger.debug( - # f"Assume role acquired temporary credentials for role {role_name}" - # ) - # return temp_creds["Credentials"] - async def get_api_gateway_domains_v1( self, provider_setting: AwsSpecificSettings, @@ -341,10 +197,6 @@ async def get_api_gateway_domains_v1( """ label = self.format_label(SeedLabel.API_GATEWAY, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.API_GATEWAY, # type: ignore - # credentials=credentials, - # ) async with get_session().create_client( "apigateway", **credentials ) as client: # type: ignore @@ -380,10 +232,6 @@ async def get_api_gateway_domains_v2( """ label = self.format_label(SeedLabel.API_GATEWAY, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.API_GATEWAY_V2, # type: ignore - # credentials=credentials, - # ) async with get_session().create_client( "apigatewayv2", **credentials ) as client: # type: ignore @@ -442,10 +290,6 @@ async def get_load_balancers_v1( """ label = self.format_label(SeedLabel.LOAD_BALANCER, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.LOAD_BALANCER, # type: ignore - # credentials=credentials, - # ) async with get_session().create_client( "elb", **credentials, @@ -481,10 +325,6 @@ async def get_load_balancers_v2( """ label = self.format_label(SeedLabel.LOAD_BALANCER, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.LOAD_BALANCER_V2, # type: ignore - # credentials=credentials, - # ) async with get_session().create_client( "elbv2", **credentials, @@ -582,10 +422,6 @@ async def describe_network_interfaces( """ interfaces = {} - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.EC2, # type: ignore - # credentials=credentials, - # ) async with get_session().create_client( "ec2", **credentials, @@ -634,9 +470,6 @@ async def get_resource_tags_paginated( Yields: AsyncGenerator[TagDescriptionTypeDef]: Tags. 
""" - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.EC2, # type: ignore - # ) async with get_session().create_client( "ec2", **credentials, @@ -720,9 +553,6 @@ async def get_rds_instances( """ label = self.format_label(SeedLabel.RDS, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.RDS, # type: ignore - # ) async with get_session().create_client( "rds", **credentials, @@ -761,9 +591,6 @@ async def get_route53_zones( """ label = self.format_label(SeedLabel.ROUTE53_ZONES, region) - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.ROUTE53_ZONES, # type: ignore - # ) async with get_session().create_client( "route53", **credentials, @@ -829,16 +656,12 @@ async def get_ecs_instances( """ label = self.format_label(SeedLabel.ECS, region) - # ecs_aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.ECS, # type: ignore - # ) - # ec2_aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.EC2, # type: ignore - # ) - async with get_session().create_client( + session = get_session() + + async with session.create_client( "ecs", **credentials, - ) as ecs, get_session().create_client( + ) as ecs, session.create_client( "ec2", **credentials, ) as ec2: # type: ignore @@ -911,9 +734,6 @@ async def get_s3_instances( ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. current_service (str): Current service. """ - # aws_kwargs = await self.get_aws_client_kwargs( - # service=AwsServices.STORAGE_BUCKET, # type: ignore - # ) async with get_session().create_client( "s3", **credentials, @@ -927,12 +747,12 @@ async def get_s3_instances( if not bucket_name: continue - region = await self.get_s3_region(client, bucket_name) - label = self.format_label(SeedLabel.STORAGE_BUCKET, region) + s3_region = await self.get_s3_region(client, bucket_name) + label = self.format_label(SeedLabel.STORAGE_BUCKET, s3_region) with SuppressValidationError(): bucket_asset = AwsStorageBucketAsset( # type: ignore - value=AwsStorageBucketAsset.url(bucket_name, region), + value=AwsStorageBucketAsset.url(bucket_name, s3_region), uid=label, scan_data={ "accountNumber": self.account_number, diff --git a/src/censys/cloud_connectors/aws_connector/credentials.py b/src/censys/cloud_connectors/aws_connector/credentials.py index f78329f..1c992a1 100644 --- a/src/censys/cloud_connectors/aws_connector/credentials.py +++ b/src/censys/cloud_connectors/aws_connector/credentials.py @@ -104,17 +104,6 @@ async def get_aws_credentials( region=region, ) - # If neither the provider nor the account have credentials, but the provider has a role, assume it using local credentials - if provider_has_role: - assert provider_settings.role_name - assert provider_settings.role_session_name - return await assume_role( - account.account_number, - provider_settings.role_name, - provider_settings.role_session_name, - region=region, - ) - # If neither the provider nor the account have credentials, but the account has a role, assume it using local credentials if account_has_role: assert account.role_name @@ -126,8 +115,19 @@ async def get_aws_credentials( region=region, ) + # If neither the provider nor the account have credentials, but the provider has a role, assume it using local credentials + if provider_has_role: + assert provider_settings.role_name + assert provider_settings.role_session_name + return await assume_role( + account.account_number, + provider_settings.role_name, + provider_settings.role_session_name, + 
region=region, + ) + # If neither the provider nor the account have credentials or roles, use local credentials - return {} + return {} # pragma: no cover # If the provider has a role and credentials, assume it using the provider credentials if provider_has_role and provider_has_credentials: @@ -193,8 +193,6 @@ async def assume_role( Returns: AwsCredentials: The AWS credentials. """ - session = get_session() - # Format the role arn role_arn = f"arn:aws:iam::{account_number}:role/{role_name}" @@ -211,7 +209,7 @@ async def assume_role( client_kwargs["region_name"] = region # Create the sts client - async with session.create_client("sts", **client_kwargs) as client: + async with get_session().create_client("sts", **client_kwargs) as client: client: STSClient # type: ignore[no-redef] # Assume the role response = await client.assume_role( diff --git a/src/censys/cloud_connectors/aws_connector/settings.py b/src/censys/cloud_connectors/aws_connector/settings.py index 9058f7b..59fa031 100644 --- a/src/censys/cloud_connectors/aws_connector/settings.py +++ b/src/censys/cloud_connectors/aws_connector/settings.py @@ -8,6 +8,8 @@ from censys.cloud_connectors.common.enums import ProviderEnum from censys.cloud_connectors.common.settings import ProviderSpecificSettings +DEFAULT_IGNORE = ["censys-cloud-connector-ignore"] + class AwsAccountNumber(ConstrainedStr): """Account Number.""" @@ -23,7 +25,7 @@ class AwsAccount(BaseModel): secret_key: Optional[str] = Field(min_length=1) role_name: Optional[str] = Field(min_length=1) role_session_name: Optional[str] = Field(min_length=1) - ignore_tags: Optional[list[str]] = Field(min_length=1) + ignore_tags: list[str] = Field(description="Tags to ignore", default=DEFAULT_IGNORE) class AwsSpecificSettings(ProviderSpecificSettings): @@ -37,9 +39,7 @@ class AwsSpecificSettings(ProviderSpecificSettings): secret_key: Optional[str] = Field(min_length=1) role_name: Optional[str] = Field(min_length=1) role_session_name: Optional[str] = Field(min_length=1) - ignore_tags: list[str] = Field( - description="Tags to ignore", default=["censys-cloud-connector-ignore"] - ) + ignore_tags: list[str] = Field(description="Tags to ignore", default=DEFAULT_IGNORE) session_token: Optional[str] = Field(min_length=1) external_id: Optional[str] = Field(min_length=1) @@ -108,30 +108,30 @@ def from_dict(cls, data: dict): return cls(**data) - def get_credentials(self): - """Generator for all configured credentials. Any values within the accounts block will take precedence over the overall values. - - Yields: - dict[str, Any] - """ - yield { - "account_number": self.account_number, - "access_key": self.access_key, - "secret_key": self.secret_key, - "role_name": self.role_name, - "role_session_name": self.role_session_name, - "ignore_tags": self.ignore_tags, - } - - if self.accounts: - for account in self.accounts: - yield { - "account_number": (account.account_number or self.account_number), - "access_key": account.access_key, - "secret_key": account.secret_key, - "role_name": (account.role_name or self.role_name), - "role_session_name": ( - account.role_session_name or self.role_session_name - ), - "ignore_tags": (account.ignore_tags or self.ignore_tags), - } + # def get_credentials(self): + # """Generator for all configured credentials. Any values within the accounts block will take precedence over the overall values. 
+ + # Yields: + # dict[str, Any] + # """ + # yield { + # "account_number": self.account_number, + # "access_key": self.access_key, + # "secret_key": self.secret_key, + # "role_name": self.role_name, + # "role_session_name": self.role_session_name, + # "ignore_tags": self.ignore_tags, + # } + + # if self.accounts: + # for account in self.accounts: + # yield { + # "account_number": (account.account_number or self.account_number), + # "access_key": account.access_key, + # "secret_key": account.secret_key, + # "role_name": (account.role_name or self.role_name), + # "role_session_name": ( + # account.role_session_name or self.role_session_name + # ), + # "ignore_tags": (account.ignore_tags or self.ignore_tags), + # } diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index 44b2eb5..59a3b41 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -15,7 +15,7 @@ from azure.mgmt.storage.aio import StorageManagementClient from azure.mgmt.storage.models import StorageAccount from azure.storage.blob import ContainerProperties -from azure.storage.blob.aio import BlobServiceClient, ContainerClient +from azure.storage.blob.aio import BlobServiceClient from censys.cloud_connectors.common.cloud_asset import AzureContainerAsset from censys.cloud_connectors.common.connector import CloudConnector @@ -306,6 +306,23 @@ async def _list_containers( await blob_service_client.close() return + async def get_storage_container_url( + self, blob_service_client: BlobServiceClient, container: ContainerProperties + ) -> str: + """Get Azure container URL. + + Args: + blob_service_client (BlobServiceClient): Blob service client. + container (ContainerProperties): Azure container properties. + + Returns: + str: Azure container URL. 
+ """ + container_client = blob_service_client.get_container_client(container) + container_url = container_client.url + await container_client.close() + return container_url + async def get_storage_containers( self, _: AzureSpecificSettings, @@ -342,12 +359,10 @@ async def get_storage_containers( account_url = account.primary_endpoints.blob blob_service_client = BlobServiceClient(account_url, credentials) # type: ignore async for container in self._list_containers(blob_service_client, account): # type: ignore - container_client: Optional[ContainerClient] = None try: - container_client = blob_service_client.get_container_client( - container + container_url = await self.get_storage_container_url( + blob_service_client, container ) - container_url = container_client.url with SuppressValidationError(): container_asset = AzureContainerAsset( # type: ignore value=container_url, @@ -359,14 +374,10 @@ async def get_storage_containers( }, ) self.add_cloud_asset(container_asset, service=current_service) - await container_client.close() except ServiceRequestError as error: # pragma: no cover self.logger.error( f"Failed to get Azure container {container} for {account.name}: {error.message}" ) - finally: - if container_client: - await container_client.close() await blob_service_client.close() diff --git a/src/censys/cloud_connectors/common/connector.py b/src/censys/cloud_connectors/common/connector.py index 5cb5543..d9797fe 100644 --- a/src/censys/cloud_connectors/common/connector.py +++ b/src/censys/cloud_connectors/common/connector.py @@ -80,6 +80,11 @@ async def get_seeds(self, provider_settings, **kwargs) -> None: seed_scanner, provider_settings, current_service=seed_type, **kwargs ) for seed_type, seed_scanner in self.seed_scanners.items() + if ( + self.provider_settings.ignore + and seed_type not in self.provider_settings.ignore + ) + or not self.provider_settings.ignore ], max_at_once=self.settings.max_concurrent_scans, ) @@ -100,6 +105,11 @@ async def get_cloud_assets(self, provider_settings, **kwargs) -> None: **kwargs, ) for cloud_asset_type, cloud_asset_scanner in self.cloud_asset_scanners.items() + if ( + self.provider_settings.ignore + and cloud_asset_type not in self.provider_settings.ignore + ) + or not self.provider_settings.ignore ], max_at_once=self.settings.max_concurrent_scans, ) diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index 997d900..d024858 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -370,7 +370,7 @@ async def get_storage_buckets( if self_link := resource_properties.get("selfLink"): scan_data["selfLink"] = self_link with SuppressValidationError(): - bucket_asset = GcpStorageBucketAsset( + bucket_asset = GcpStorageBucketAsset( # type: ignore[call-arg] # TODO: Update when API can accept other urls value=f"https://storage.googleapis.com/{bucket_name}", uid=self.format_label(provider_settings, list_assets_result), diff --git a/tests/base_connector_case.py b/tests/base_connector_case.py index 30d403b..1aa920c 100644 --- a/tests/base_connector_case.py +++ b/tests/base_connector_case.py @@ -43,17 +43,10 @@ def assert_seeds_with_values(self, seeds: set[Seed], values: list[str]): Raises: AssertionError: If the seeds do not have the expected values. 
""" - # seeds_len = len(seeds) - # values_len = len(values) - # assert seeds_len == values_len, f"Expected {values_len} seeds, got {seeds_len}" seed_values = [seed.value for seed in seeds] seed_values.sort() values.sort() assert values == seed_values, f"Expected {values}, got {seed_values}" - # for seed in seeds: - # assert ( - # seed.value in values - # ), f"The seed {seed.value} is not in the expected values {values}" def mock_healthcheck(self) -> MagicMock: """Mock the healthcheck. diff --git a/tests/test_aws_connector.py b/tests/test_aws_connector.py index b805eac..49937b6 100644 --- a/tests/test_aws_connector.py +++ b/tests/test_aws_connector.py @@ -1,14 +1,18 @@ import json from typing import Any -from unittest import TestCase from unittest.mock import MagicMock, Mock, call -import pytest +import asynctest +from asynctest import TestCase +from botocore.exceptions import ClientError from parameterized import parameterized from censys.cloud_connectors.aws_connector.connector import AwsCloudConnector +from censys.cloud_connectors.aws_connector.credentials import ( + AwsCredentials, + get_aws_credentials, +) from censys.cloud_connectors.aws_connector.enums import ( - AwsDefaults, AwsResourceTypes, AwsServices, SeedLabel, @@ -19,19 +23,14 @@ from censys.cloud_connectors.common.seed import DomainSeed, IpSeed from tests.base_connector_case import BaseConnectorCase -failed_import = False -try: - from botocore.exceptions import ClientError -except ImportError: - failed_import = True - -@pytest.mark.skipif(failed_import, reason="AWS SDK not installed") class TestAwsConnector(BaseConnectorCase, TestCase): connector: AwsCloudConnector connector_cls = AwsCloudConnector + test_credentials: AwsCredentials + data: dict[str, dict] - def setUp(self) -> None: + async def setUp(self) -> None: super().setUp() # Note: responses contains a block that stores the credentials @@ -46,49 +45,67 @@ def setUp(self) -> None: self.connector.provider_settings = test_aws_settings self.connector.account_number = self.data["TEST_CREDS"]["account_number"] - creds = test_aws_settings.get_credentials() - cred = next(creds) - self.connector.credential = cred - self.region = self.data["TEST_CREDS"]["regions"][0] - self.connector.region = self.region + self.test_credentials = await get_aws_credentials( + test_aws_settings, None, self.region + ) + + def mock_session(self) -> asynctest.MagicMock: + """Mock the session creator. + + Returns: + MagicMock: mocked session + """ + mock_get_session = self.mocker.patch( + "censys.cloud_connectors.aws_connector.connector.get_session", + new_callable=asynctest.MagicMock(), + ) + mock_session = mock_get_session.return_value + return mock_session - def mock_client(self) -> MagicMock: + def mock_create_client( + self, mock_session: asynctest.MagicMock + ) -> asynctest.MagicMock: """Mock the client creator. + Args: + mock_session (asynctest.MagicMock): mocked session + Returns: MagicMock: mocked client """ - return self.mocker.patch.object(self.connector, "get_aws_client") + mock_create_client = mock_session.create_client + return mock_create_client - def mock_client_api_response( - self, client: MagicMock, method_name: str, data: Any - ) -> MagicMock: - """Mock the boto3 client API response. + def mock_client( + self, mock_create_client: asynctest.MagicMock + ) -> asynctest.MagicMock: + """Mock the client. 
Args: - client (MagicMock): mocked client - method_name (str): method name - data (Any): data to return + mock_create_client (asynctest.MagicMock): mocked client creator Returns: MagicMock: mocked client """ - return self.mocker.patch.object( - client.return_value, method_name, return_value=data - ) + mock_client = mock_create_client.return_value.__aenter__.return_value + return mock_client - def mock_api_response(self, method_name: str, data: Any) -> MagicMock: - """Create a client and mock the API response. + def mock_client_api_response( + self, mock_client: asynctest.MagicMock, method_name: str, data: Any + ) -> asynctest.MagicMock: + """Mock the boto3 client API response. Args: + mock_client (asynctest.MagicMock): mocked client method_name (str): method name data (Any): data to return Returns: MagicMock: mocked client """ - return self.mock_client_api_response(self.mock_client(), method_name, data) + setattr(mock_client, method_name, asynctest.CoroutineMock(return_value=data)) + return mock_client def mock_healthcheck(self) -> MagicMock: """Mock the healthcheck. @@ -100,74 +117,7 @@ def mock_healthcheck(self) -> MagicMock: "censys.cloud_connectors.aws_connector.connector.Healthcheck" ) - def test_get_aws_client(self): - # Test data - self.connector.provider_settings = AwsSpecificSettings.from_dict( - self.data["TEST_CREDS"] - ) - service = AwsServices.API_GATEWAY - - # Mock - mock_client = self.mocker.patch("boto3.client", autospec=True) - - # Actual call - self.connector.get_aws_client(service) - - # Assertions - mock_client.assert_called_with( - service, - region_name=self.connector.region, - aws_access_key_id=self.connector.provider_settings.access_key, - aws_secret_access_key=self.connector.provider_settings.secret_key, - ) - - def test_get_aws_client_uses_override_credentials(self): - service = AwsServices.API_GATEWAY - expected = self.data["TEST_BOTO_CRED_FULL"] - mock_client = self.mocker.patch("boto3.client", autospec=True) - mock_credentials = self.mocker.patch.object(self.connector, "credentials") - - self.connector.get_aws_client(service, expected) - - mock_client.assert_called_with(service, **expected) - mock_credentials.assert_not_called() - - def test_get_aws_client_no_key(self): - cred = self.data["TEST_BOTO_CRED_SSO"] - service = AwsServices.API_GATEWAY - mock_client = self.mocker.patch("boto3.client", autospec=True) - self.connector.get_aws_client(service, cred) - mock_client.assert_called_with(service) - - def test_credentials_using_role(self): - cred = self.data["TEST_GET_CREDENTIALS_WITH_ROLE"] - self.connector.credential = cred - mocked = self.mocker.patch.object(self.connector, "get_assume_role_credentials") - self.connector.credentials() - mocked.assert_called_once_with(cred["role_name"]) - - def test_credentials_using_access_key(self): - self.connector.credential = self.data["TEST_GET_CREDENTIALS_WITH_KEYS"] - mocked = self.mocker.patch.object(self.connector, "boto_cred") - self.connector.credentials() - mocked.assert_called_once_with( - self.connector.region, - self.connector.provider_settings.access_key, - self.connector.provider_settings.secret_key, - self.connector.provider_settings.session_token, - ) - - def test_boto_cred(self): - expected = self.data["TEST_BOTO_CRED_FULL"] - actual = self.connector.boto_cred( - expected["region_name"], - expected["aws_access_key_id"], - expected["aws_secret_access_key"], - expected["aws_session_token"], - ) - assert actual == expected - - def test_scan_all(self): + async def test_scan_all(self): # Test data 
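The new mock_session, mock_create_client and mock_client test helpers reflect how the async AWS connector is expected to obtain clients: a session from get_session(), then session.create_client(...) used as an async context manager, so the real client lives behind __aenter__. A minimal standalone illustration of that wiring follows, using the standard library's unittest.mock rather than asynctest and an invented list_buckets helper; it is a sketch of the pattern, not the connector's actual code.

import asyncio
from unittest import mock


async def list_buckets(get_session):
    # aiobotocore-style shape: the session hands back clients as async context
    # managers, so the live client only exists inside the `async with` block.
    session = get_session()
    async with session.create_client("s3") as client:
        response = await client.list_buckets()
        return [bucket["Name"] for bucket in response["Buckets"]]


async def main() -> None:
    # Same chain as the test helpers: session -> create_client -> __aenter__
    # yields the client, and each API call becomes an awaitable mock.
    mock_get_session = mock.MagicMock()
    mock_session = mock_get_session.return_value
    mock_client = mock_session.create_client.return_value.__aenter__.return_value
    mock_client.list_buckets = mock.AsyncMock(
        return_value={"Buckets": [{"Name": "test-bucket-1"}]}
    )

    assert await list_buckets(mock_get_session) == ["test-bucket-1"]
    mock_session.create_client.assert_called_once_with("s3")


asyncio.run(main())

Returning plain dicts from the awaitable mocks keeps the fake responses shaped like real botocore payloads, which is what mock_client_api_response does with asynctest.CoroutineMock.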
test_single_account = self.data["TEST_ACCOUNTS"] test_aws_settings = [ @@ -176,14 +126,14 @@ def test_scan_all(self): provider_settings: dict[tuple, AwsSpecificSettings] = { p.get_provider_key(): p for p in test_aws_settings } - self.connector.settings.providers[self.connector.provider] = provider_settings + self.connector.settings.providers[self.connector.provider] = provider_settings # type: ignore[arg-type] # Mock scan - mock_scan = self.mocker.patch.object(self.connector, "scan") mock_healthcheck = self.mock_healthcheck() - - # Actual call - self.connector.scan_all() + # mock_scan = self.mocker.patch.object(self.connector, "scan") + with asynctest.patch.object(self.connector, "scan") as mock_scan: + # Actual call + await self.connector.scan_all() # Assertions expected_calls = 3 @@ -193,25 +143,24 @@ def test_scan_all(self): # TODO test multiple account_numbers with multiple regions # TODO test single account_number with multiple regions - def test_scan(self): - self.skipTest("TODO") # TODO + # def test_scan(self): + # self.skipTest("TODO") # TODO - # TODO test_scan_clears_account_and_region + # # TODO test_scan_clears_account_and_region - @parameterized.expand([(ClientError,)]) - def test_scan_fail(self, exception: Exception): - self.skipTest("TODO") # TODO + # @parameterized.expand([(ClientError,)]) + # def test_scan_fail(self, exception: Exception): + # self.skipTest("TODO") # TODO - def test_get_seeds(self): + async def test_get_seeds(self): # Test data self.connector.provider_settings = AwsSpecificSettings.from_dict( self.data["TEST_CREDS"] ) + # Mock seed_scanners = { - AwsResourceTypes.API_GATEWAY: self.mocker.Mock(), + AwsResourceTypes.API_GATEWAY: asynctest.CoroutineMock(), } - - # Mock self.mocker.patch.object( self.connector, "seed_scanners", @@ -219,45 +168,60 @@ def test_get_seeds(self): ) # Actual call - self.connector.get_seeds() + await self.connector.get_seeds(self.connector.provider_settings) # Assertions for scanner in self.connector.seed_scanners.values(): scanner.assert_called_once() - def test_get_api_gateway_domains(self): + async def test_get_api_gateway_domains(self): # Mock mocked_scanners = self.mocker.patch.multiple( self.connector, - get_api_gateway_domains_v1=self.mocker.Mock(), - get_api_gateway_domains_v2=self.mocker.Mock(), + get_api_gateway_domains_v1=asynctest.CoroutineMock(), + get_api_gateway_domains_v2=asynctest.CoroutineMock(), ) # Actual call - self.connector.get_api_gateway_domains() + await self.connector.get_api_gateway_domains( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.API_GATEWAY, + ) # Assertions for mocked_scanner in mocked_scanners.values(): mocked_scanner.assert_called_once_with() - def test_get_api_gateway_domains_v1_creates_seeds(self): + async def test_get_api_gateway_domains_v1_creates_seeds(self): # Test data domains = self.data["TEST_API_GATEWAY_DOMAINS_V1"].copy() test_label = f"AWS: API Gateway - 999999999999/{self.region}" test_seed_values = [f"first-id.execute-api.{self.region}.amazonaws.com"] # Mock - self.mock_api_response("get_rest_apis", domains) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "get_rest_apis", domains) # Actual call - self.connector.get_api_gateway_domains_v1() + await self.connector.get_api_gateway_domains_v1( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + 
AwsResourceTypes.API_GATEWAY, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_api_gateway_domains_v2_creates_seeds(self): + async def test_api_gateway_domains_v2_creates_seeds(self): # Test data domains = self.data["TEST_API_GATEWAY_DOMAINS_V2"].copy() test_label = f"AWS: API Gateway - 999999999999/{self.region}" @@ -267,217 +231,339 @@ def test_api_gateway_domains_v2_creates_seeds(self): ] # Mock - self.mock_api_response("get_apis", domains) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "get_apis", domains) # Actual call - self.connector.get_api_gateway_domains_v2() + await self.connector.get_api_gateway_domains_v2( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.API_GATEWAY, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_load_balancers(self): + async def test_get_load_balancers(self): # Mock mocked_scanners = self.mocker.patch.multiple( self.connector, - get_load_balancers_v1=self.mocker.Mock(), - get_load_balancers_v2=self.mocker.Mock(), + get_load_balancers_v1=asynctest.CoroutineMock(), + get_load_balancers_v2=asynctest.CoroutineMock(), ) # Actual call - self.connector.get_load_balancers() + await self.connector.get_load_balancers( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.LOAD_BALANCER, + ) # Assertions for mocked_scanner in mocked_scanners.values(): mocked_scanner.assert_called_once_with() - def test_get_elbv1_instances_creates_seeds(self): + async def test_get_elbv1_instances_creates_seeds(self): # Test data data = self.data["TEST_LOAD_BALANCER_V1"].copy() test_label = f"AWS: ELB - 999999999999/{self.region}" test_seed_values = ["my-load-balancer-1234567890.us-west-2.elb.amazonaws.com"] # Mock - self.mock_api_response("describe_load_balancers", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_load_balancers", data) # Actual call - self.connector.get_load_balancers_v1() + await self.connector.get_load_balancers_v1( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.LOAD_BALANCER, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_elbv2_instances_creates_seeds(self): + async def test_get_elbv2_instances_creates_seeds(self): # Test data data = self.data["TEST_LOAD_BALANCER_V2"].copy() test_label = f"AWS: ELB - 999999999999/{self.region}" test_seed_values = ["my-load-balancer-424835706.us-west-2.elb.amazonaws.com"] # Mock - self.mock_api_response("describe_load_balancers", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_load_balancers", data) # Actual call - self.connector.get_load_balancers_v2() + await self.connector.get_load_balancers_v2( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.LOAD_BALANCER, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def 
test_get_network_interfaces_creates_seeds(self): + async def test_get_network_interfaces_creates_seeds(self): # Test data data = self.data["TEST_NETWORK_INTERFACES"].copy() test_label = f"AWS: ENI - 999999999999/{self.region}" test_seed_values = ["108.156.117.66"] # Mock - self.mock_api_response("describe_network_interfaces", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_network_interfaces", data) # Actual call - self.connector.get_network_interfaces() + await self.connector.get_network_interfaces( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.NETWORK_INTERFACE, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_network_interfaces_ignores_tags(self): + async def test_get_network_interfaces_ignores_tags(self): + # Test data data = self.data["TEST_NETWORK_INTERFACES_IGNORES_TAGS"].copy() - self.mock_api_response("describe_network_interfaces", data) + self.connector.ignored_tags = ["test-ignore-tag-name"] + # Mock + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_network_interfaces", data) add_seed = self.mocker.patch.object(self.connector, "add_seed") - self.connector.ignored_tags = ["test-ignore-tag-name"] - self.connector.get_network_interfaces() + # Actual call + await self.connector.get_network_interfaces( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.NETWORK_INTERFACE, + ) + + # Assertions add_seed.assert_not_called() - def test_ignore_tags_on_ec2_and_eni(self): + async def test_ignore_tags_on_ec2_and_eni(self): pass - def test_describe_network_interfaces_ignores_tags(self): + async def test_describe_network_interfaces_ignores_tags(self): + # Test data expected = { "3.87.58.15": { "NetworkInterfaceId": "eni-0754a4d9b25b09f20", "InstanceId": "i-0a9a18cd985cf3dcf", }, } - data = self.data["TEST_DESCRIBE_NETWORK_INTERFACES_IGNORES_TAGS"].copy() - self.mock_api_response("describe_network_interfaces", data) + + # Mock + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_network_interfaces", data) + self.connector.ignored_tags = ["eni-ignore-tag-test"] - assert self.connector.describe_network_interfaces() == expected + # Actual call + network_interfaces = await self.connector.describe_network_interfaces( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + ) - def test_get_network_interfaces_ignores_instance_tags(self): + # Assertions + assert network_interfaces == expected + + async def test_get_network_interfaces_ignores_instance_tags(self): + # Test data data = self.data["TEST_DESCRIBE_NETWORK_INTERFACES_RESULT"].copy() + resource_tags = self.data["TEST_INSTANCE_RESOURCE_TAGS"].copy() + self.connector.ignored_tags = ["test-ignore-instance-tag-name"] + + # Mock + mock_describe_network_interfaces = asynctest.MagicMock() + mock_describe_network_interfaces.__aiter__.return_value = data self.mocker.patch.object( - self.connector, "describe_network_interfaces", return_value=data + self.connector, + 
"describe_network_interfaces", + return_value=mock_describe_network_interfaces, ) - resource_tags = self.data["TEST_INSTANCE_RESOURCE_TAGS"].copy() + mock_get_resource_tags_paginated = asynctest.MagicMock() + mock_get_resource_tags_paginated.__aiter__.return_value = resource_tags self.mocker.patch.object( - self.connector, "get_resource_tags_paginated", return_value=resource_tags + self.connector, + "get_resource_tags_paginated", + return_value=mock_get_resource_tags_paginated, ) - add_seed = self.mocker.patch.object(self.connector, "add_seed") - self.connector.ignored_tags = ["test-ignore-instance-tag-name"] - self.connector.get_network_interfaces() + # Actual call + await self.connector.get_network_interfaces( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.NETWORK_INTERFACE, + ) + + # Assertions add_seed.assert_not_called() - def test_get_resource_tags_handles_multiple_formats(self): + async def test_get_resource_tags_handles_multiple_formats(self): + # Test data expected = { "test-resource-id-1": ["resource-tag-in-key", "resource-tag-in-value"] } data = self.data["TEST_RESOURCE_TAGS_MULTIPLE_FORMATS"].copy() + + # Mock + mock_get_resource_tags_paginated = asynctest.MagicMock() + mock_get_resource_tags_paginated.__aiter__.return_value = data self.mocker.patch.object( - self.connector, "get_resource_tags_paginated", return_value=data + self.connector, + "get_resource_tags_paginated", + return_value=mock_get_resource_tags_paginated, ) - assert self.connector.get_resource_tags() == expected - def test_rds_instances_creates_seeds(self): + # Actual call + resource_tags = await self.connector.get_resource_tags( + self.test_credentials, ["instance"] + ) + + # Assertions + assert resource_tags == expected + + async def test_rds_instances_creates_seeds(self): # Test data data = self.data["TEST_RDS_INSTANCES"].copy() test_label = f"AWS: RDS - 999999999999/{self.region}" test_seed_values = [f"my-db-instance.ccc.{self.region}.rds.amazonaws.com"] # Mock - self.mock_api_response("describe_db_instances", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_db_instances", data) # Actual call - self.connector.get_rds_instances() + await self.connector.get_rds_instances( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.RDS, + ) # Assertions self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_rds_skips_private_instances(self): + async def test_rds_skips_private_instances(self): # Test data data = self.data["TEST_RDS_SKIPS_PRIVATE"].copy() # Mock - self.mock_api_response("describe_db_instances", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "describe_db_instances", data) # Actual call - self.connector.get_rds_instances() - - # Assertions - assert self.connector.seeds == {} - - def test_route53_zones_creates_seeds(self): - # Test data - hosts = self.data["TEST_ROUTE53_ZONES_LIST_HOSTED_ZONES"].copy() - resources = self.data["TEST_ROUTE53_ZONES_LIST_RESOURCE_RECORD_SETS"].copy() - test_label = f"AWS: Route53/Zones - 999999999999/{self.region}" - expected_calls = [ - call( - DomainSeed(value="example.com", label=test_label), - 
route53_zone_res=self.mocker.ANY, - aws_client=self.mocker.ANY, - ), - call( - DomainSeed(value="example.com", label=test_label), - route53_zone_res=self.mocker.ANY, - aws_client=self.mocker.ANY, - ), - call( - DomainSeed(value="sub.example.com", label=test_label), - route53_zone_res=self.mocker.ANY, - aws_client=self.mocker.ANY, - ), - ] - - # Mock - self.mocker.patch.multiple( - self.connector, - _get_route53_zone_hosts=Mock(return_value=hosts), - _get_route53_zone_resources=Mock(return_value=resources), + await self.connector.get_rds_instances( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.RDS, ) - mock_add_seed = self.mocker.patch.object(self.connector, "add_seed") - - # Actual Call - self.connector.get_route53_zones() - # Assertions - mock_add_seed.assert_has_calls(expected_calls) - assert mock_add_seed.call_count == 3 + assert self.connector.seeds == {} - def test_route53_zones_pagination(self): - self.skipTest("TODO client.get_paginator") + # async def test_route53_zones_creates_seeds(self): + # # Test data + # hosts = self.data["TEST_ROUTE53_ZONES_LIST_HOSTED_ZONES"].copy() + # resources = self.data["TEST_ROUTE53_ZONES_LIST_RESOURCE_RECORD_SETS"].copy() + # test_label = f"AWS: Route53/Zones - 999999999999/{self.region}" + # expected_calls = [ + # call( + # DomainSeed(value="example.com", label=test_label), + # route53_zone_res=self.mocker.ANY, + # aws_client=self.mocker.ANY, + # ), + # call( + # DomainSeed(value="example.com", label=test_label), + # route53_zone_res=self.mocker.ANY, + # aws_client=self.mocker.ANY, + # ), + # call( + # DomainSeed(value="sub.example.com", label=test_label), + # route53_zone_res=self.mocker.ANY, + # aws_client=self.mocker.ANY, + # ), + # ] + + # # Mock + # self.mocker.patch.multiple( + # self.connector, + # _get_route53_zone_hosts=Mock(return_value=hosts), + # _get_route53_zone_resources=Mock(return_value=resources), + # ) + + # mock_add_seed = self.mocker.patch.object(self.connector, "add_seed") + + # # Actual Call + # await self.connector.get_route53_zones(self.connector.provider_settings) + + # # Assertions + # mock_add_seed.assert_has_calls(expected_calls) + # assert mock_add_seed.call_count == 3 + + # async def test_route53_zones_pagination(self): + # self.skipTest("TODO client.get_paginator") # TODO test_route53_invalid_domain_raises - def test_get_s3_instances_creates_seeds(self): + async def test_get_s3_instances_creates_seeds(self): # Test data data = self.data["TEST_S3_BUCKETS"].copy() test_label = f"AWS: S3 - 999999999999/{self.region}" expected_calls = [ call( - AwsStorageBucketAsset( + AwsStorageBucketAsset( # type: ignore[call-arg] value="https://test-bucket-1.s3.test-region-1.amazonaws.com", uid=test_label, scan_data={"accountNumber": "999999999999"}, @@ -486,7 +572,7 @@ def test_get_s3_instances_creates_seeds(self): aws_client=self.mocker.ANY, ), call( - AwsStorageBucketAsset( + AwsStorageBucketAsset( # type: ignore[call-arg] value="https://test-bucket-2.s3.test-region-1.amazonaws.com", uid=test_label, scan_data={"accountNumber": "999999999999"}, @@ -497,7 +583,10 @@ def test_get_s3_instances_creates_seeds(self): ] # Mock - self.mock_api_response("list_buckets", data) + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "list_buckets", data) mock_add = self.mocker.patch.object(self.connector, "add_cloud_asset") self.mocker.patch.object( @@ 
-505,42 +594,63 @@ def test_get_s3_instances_creates_seeds(self): ).return_value = self.region # Actual Call - self.connector.get_s3_instances() + await self.connector.get_s3_instances( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.STORAGE_BUCKET, + ) # Assertions mock_add.assert_has_calls(expected_calls) assert mock_add.call_count == 2 - def test_get_s3_region_has_no_region(self): + async def test_get_s3_region_has_no_region(self): data = {"LocationConstraint": None} bucket_name = "test-bucket-1" - mock_client = self.mocker.patch("types_aiobotocore_s3.client.S3Client", autospec=True) + mock_client = self.mocker.patch( + "types_aiobotocore_s3.client.S3Client", autospec=True + ) mock_bucket_location = self.mocker.patch.object( mock_client, "get_bucket_location", return_value=data ) - region = self.connector.get_s3_region(mock_client, bucket_name) + region = await self.connector.get_s3_region(mock_client, bucket_name) mock_bucket_location.assert_called_once_with(Bucket=bucket_name) # TODO: use AwsDefaults.REGION.value when available assert region == "us-east-1" - def test_get_s3_handles_bucket_region_exception(self): + async def test_get_s3_handles_bucket_region_exception(self): + # Test data buckets = self.data["TEST_S3_BUCKETS"].copy() - self.mock_api_response("list_buckets", buckets) + # Mock + mock_session = self.mock_session() + mock_create_client = self.mock_create_client(mock_session) + mock_client = self.mock_client(mock_create_client) + self.mock_client_api_response(mock_client, "list_buckets", buckets) self.mocker.patch.object( self.connector, "get_s3_region", side_effect=ClientError({}, "test") ) mock_add_asset = self.mocker.patch.object(self.connector, "add_cloud_asset") mock_log = self.mocker.patch.object(self.connector.logger, "error") - self.connector.get_s3_instances() + # Actual Call + await self.connector.get_s3_instances( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.STORAGE_BUCKET, + ) + # Assertions mock_add_asset.assert_not_called() mock_log.assert_called_once() - def test_ecs_instances_creates_seeds(self): + async def test_ecs_instances_creates_seeds(self): # Test data clusters = self.data["TEST_ECS_LIST_CLUSTERS"].copy() containers = self.data["TEST_ECS_LIST_CONTAINER_INSTANCES"].copy() @@ -559,87 +669,42 @@ def test_ecs_instances_creates_seeds(self): ] # Mock - ecs_client = self.mock_client() - self.mocker.patch.multiple( - ecs_client, - list_clusters=Mock(return_value=clusters), - list_container_instances=Mock(return_value=containers), - describe_container_instances=Mock(return_value=instances), - ) + session = self.mock_session() + mock_create_client = self.mock_create_client(session) - ec2_client = self.mock_client() - self.mocker.patch.object( - ec2_client, "describe_instances", Mock(return_value=descriptions) + ecs_client = self.mock_client(mock_create_client) + self.mock_client_api_response(ecs_client, "list_clusters", clusters) + self.mock_client_api_response( + ecs_client, "list_container_instances", containers + ) + self.mock_client_api_response( + ecs_client, "describe_container_instances", instances ) - client_factory = self.mocker.MagicMock() - client_factory.side_effect = { - AwsServices.ECS: ecs_client, - AwsServices.EC2: ec2_client, - }.get - self.mocker.patch.object(self.connector, "get_aws_client", client_factory) + ec2_client = self.mock_client(mock_create_client) + self.mock_client_api_response(ec2_client, "describe_instances", 
descriptions) mock_add_seed = self.mocker.patch.object(self.connector, "add_seed") # Actual call - self.connector.get_ecs_instances() + await self.connector.get_ecs_instances( + self.connector.provider_settings, + self.test_credentials, + self.region, + [], + AwsResourceTypes.ECS, + ) # Assertions mock_add_seed.assert_has_calls(expected_calls) assert mock_add_seed.call_count == 2 - def test_assume_role(self): - # Test data - data = self.data["TEST_STS"].copy() - - # Mock - mock = self.mock_api_response("assume_role", data) - - # Actual call - result = self.connector.assume_role() - - # Assertions - assert result["AccessKeyId"] == "sts-access-key-value" - mock.assert_called_with( - RoleArn="arn:aws:iam::999999999999:role/CensysCloudConnectorRole", - RoleSessionName=AwsDefaults.ROLE_SESSION_NAME.value, - ) - - def test_assume_role_with_custom_names(self): - expected_role = "test-override-role-name" - expected_role_session = "test-override-role-name" - data = self.data["TEST_STS"].copy() - mock = self.mock_api_response("assume_role", data) - self.connector.credential["role_session_name"] = expected_role_session - - self.connector.assume_role(expected_role) - - mock.assert_called_with( - RoleArn=f"arn:aws:iam::999999999999:role/{expected_role}", - RoleSessionName=expected_role_session, - ) - - def test_get_assume_role_credentials_uses_cache(self): - expected = self.data["TEST_GET_CREDENTIALS_WITH_ROLE"] - self.connector.temp_sts_cred = expected - assert self.connector.get_assume_role_credentials() == expected - - def test_get_assume_role_credentials(self): - role_name = "test-assume-role-name" - expected = self.data["TEST_BOTO_CRED_FULL"] - - assume_role = self.mock_api_response("assume_role", self.data["TEST_STS"]) - self.mocker.patch.object(self.connector, "boto_cred", return_value=expected) - - assert self.connector.get_assume_role_credentials(role_name) == expected - assume_role.assert_called_once() - def test_format_label_without_region(self): # Test data - expected = f"AWS: S3 - 999999999999/{self.region}" + expected = "AWS: S3 - 999999999999" # Actual call - label = self.connector.format_label(SeedLabel.STORAGE_BUCKET) + label = self.connector.format_label(SeedLabel.STORAGE_BUCKET, region=None) # Assertions assert label == expected @@ -661,7 +726,7 @@ def test_format_label_with_connector_region(self): expected = f"AWS: S3 - 999999999999/{self.region}" # Actual call - label = self.connector.format_label(SeedLabel.STORAGE_BUCKET) + label = self.connector.format_label(SeedLabel.STORAGE_BUCKET, self.region) # Assertions assert label == expected @@ -683,5 +748,5 @@ def test_no_ignored_tag(self): def test_extract_tags_from_tagset(self): tag_set = [{"Key": "tag-1"}, {"Key": "tag-2"}] - tags = self.connector.extract_tags_from_tagset(tag_set) + tags = self.connector.extract_tags_from_tagset(tag_set) # type: ignore[arg-type] assert tags == ["tag-1", "tag-2"] diff --git a/tests/test_aws_credentials.py b/tests/test_aws_credentials.py new file mode 100644 index 0000000..43ff176 --- /dev/null +++ b/tests/test_aws_credentials.py @@ -0,0 +1,353 @@ +from typing import Optional + +import asynctest +from asynctest import TestCase +from parameterized import parameterized + +from censys.cloud_connectors.aws_connector.credentials import ( + assume_role, + get_aws_credentials, +) +from censys.cloud_connectors.aws_connector.settings import ( + AwsAccount, + AwsAccountNumber, + AwsSpecificSettings, +) +from tests.base_case import BaseCase + + +class TestAwsCredentials(BaseCase, TestCase): + 
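The scenarios that follow exercise the new get_aws_credentials and assume_role helpers by mocking get_session, create_client("sts", ...) and client.assume_role(RoleArn=..., RoleSessionName=...). The implementation itself is outside this part of the patch, so the sketch below is only a plausible reconstruction inferred from what the tests assert; the function name assume_role_sketch and its exact body are assumptions, while the keyword handling mirrors the test's create_client_kwargs.

from typing import Any, Optional

from aiobotocore.session import get_session


async def assume_role_sketch(
    account_number: str,
    role_name: str,
    role_session_name: str,
    access_key: Optional[str] = None,
    secret_key: Optional[str] = None,
    region: Optional[str] = None,
) -> dict[str, Any]:
    """Exchange local or static credentials for temporary STS credentials."""
    create_client_kwargs: dict[str, Any] = {}
    if region:
        create_client_kwargs["region_name"] = region
    if access_key and secret_key:
        create_client_kwargs["aws_access_key_id"] = access_key
        create_client_kwargs["aws_secret_access_key"] = secret_key

    session = get_session()
    async with session.create_client("sts", **create_client_kwargs) as sts:
        response = await sts.assume_role(
            RoleArn=f"arn:aws:iam::{account_number}:role/{role_name}",
            RoleSessionName=role_session_name,
        )

    credentials: dict[str, Any] = {
        "aws_access_key_id": response["Credentials"]["AccessKeyId"],
        "aws_secret_access_key": response["Credentials"]["SecretAccessKey"],
        "aws_session_token": response["Credentials"]["SessionToken"],
    }
    if region:
        credentials["region_name"] = region
    return credentials

The region and static key pair are only forwarded to create_client when they are actually set, which is why the expected create_client kwargs vary between the parameterized scenarios.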
@parameterized.expand( + [ + ( + "account-creds-assume-account-role", + {"region_name": "us-east-1"}, + { + "account_number": "123123123123", + "access_key": None, + "secret_key": None, + "role_name": "test-role-name-settings", + "role_session_name": "test-role-session-name-settings", + }, + ( + "321321321321", + "test-role-name-account", + "test-role-session-name-account", + ), + { + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "region": "us-east-1", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key="xxxxxxxxxxxxxxxxxxxx", + secret_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + role_name="test-role-name-account", + role_session_name="test-role-session-name-account", + ), + "us-east-1", + ), + ( + "account-creds-assume-settings-role", + {"region_name": "us-east-1"}, + { + "account_number": "123123123123", + "access_key": None, + "secret_key": None, + "role_name": "test-role-name-settings", + "role_session_name": "test-role-session-name-settings", + }, + ( + "321321321321", + "test-role-name-settings", + "test-role-session-name-settings", + ), + { + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "region": "us-east-1", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key="xxxxxxxxxxxxxxxxxxxx", + secret_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + role_name=None, + role_session_name=None, + ), + "us-east-1", + ), + ( + "settings-creds-assume-account-role", + {"region_name": "us-east-1"}, + { + "account_number": "123123123123", + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "role_name": None, + "role_session_name": None, + }, + ( + "321321321321", + "test-role-name-account", + "test-role-session-name-account", + ), + { + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "region": "us-east-1", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key=None, + secret_key=None, + role_name="test-role-name-account", + role_session_name="test-role-session-name-account", + ), + "us-east-1", + ), + ( + "settings-creds-assume-settings-role", + {"region_name": "us-east-1"}, + { + "account_number": "123123123123", + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "role_name": "test-role-name-settings", + "role_session_name": "test-role-session-name-settings", + }, + ( + "321321321321", + "test-role-name-settings", + "test-role-session-name-settings", + ), + { + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "region": "us-east-1", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key=None, + secret_key=None, + role_name=None, + role_session_name=None, + ), + "us-east-1", + ), + ( + "settings-creds-assume-settings-role", + {"region_name": "us-east-1"}, + { + "account_number": "123123123123", + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "role_name": "test-role-name", + "role_session_name": "test-role-session-name", + }, + ("123123123123", "test-role-name", "test-role-session-name"), + { + "access_key": "xxxxxxxxxxxxxxxxxxxx", + "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "region": "us-east-1", + }, + None, + "us-east-1", + ), + ( + "local-creds-assume-settings-role", + {"region_name": "us-east-1"}, + 
{ + "account_number": "123123123123", + "access_key": None, + "secret_key": None, + "role_name": "test-role-name", + "role_session_name": "test-role-session-name", + }, + ("123123123123", "test-role-name", "test-role-session-name"), + { + "region": "us-east-1", + }, + None, + "us-east-1", + ), + ( + "local-creds-assume-settings-role-account", + {"region_name": "us-east-2"}, + { + "account_number": "123123123123", + "access_key": None, + "secret_key": None, + "role_name": "test-role-name", + "role_session_name": "test-role-session-name", + }, + ("321321321321", "test-role-name", "test-role-session-name"), + { + "region": "us-east-2", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key=None, + secret_key=None, + role_name=None, + role_session_name=None, + ), + "us-east-2", + ), + ( + "local-creds-assume-account-role-account", + {"region_name": "us-east-3"}, + { + "account_number": "123123123123", + "access_key": None, + "secret_key": None, + "role_name": "test-role-name-settings", + "role_session_name": "test-role-session-name-settings", + }, + ( + "321321321321", + "test-role-name-account", + "test-role-session-name-account", + ), + { + "region": "us-east-3", + }, + AwsAccount( + account_number=AwsAccountNumber("321321321321"), + access_key=None, + secret_key=None, + role_name="test-role-name-account", + role_session_name="test-role-session-name-account", + ), + "us-east-3", + ), + ] + ) + async def test_get_aws_credentials( + self, + name, + expected_credentials: dict, + provider_settings_dict: dict, + assume_role_call_args: Optional[tuple] = None, + assume_role_call_kwargs: Optional[dict] = None, + account: Optional[AwsAccount] = None, + region: Optional[str] = None, + ): + # Test data + provider_settings = AwsSpecificSettings.from_dict( + { + "regions": ["us-east-1"], + **provider_settings_dict, + } + ) + + # Mocks + mock_assume_role = self.mocker.patch( + "censys.cloud_connectors.aws_connector.credentials.assume_role", + new_callable=asynctest.CoroutineMock, + ) + + # Actual call + credentials = await get_aws_credentials( + provider_settings, account=account, region=region + ) + + # Assertions + if assume_role_call_args: + if not assume_role_call_kwargs: + assume_role_call_kwargs = {} + mock_assume_role.assert_awaited_once_with( + *assume_role_call_args, **assume_role_call_kwargs + ) + assert credentials == mock_assume_role.return_value + else: + mock_assume_role.assert_not_awaited() + assert credentials == expected_credentials + + @parameterized.expand( + [ + ("no-credentials",), + ( + "access-key-pair", + "xxxxxxxxxxxxxxxxxxxx", + "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + ), + ( + "access-key-pair-region", + "xxxxxxxxxxxxxxxxxxxx", + "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "us-east-1", + ), + ( + "region-only", + None, + None, + "us-east-1", + ), + ] + ) + async def test_assume_role( + self, + name: str, + access_key: Optional[str] = None, + secret_key: Optional[str] = None, + region: Optional[str] = None, + ): + # Test data + account_number = "123123123123" + role_name = "test-role-name" + role_session_name = "test-role-session-name" + role_arn = f"arn:aws:iam::{account_number}:role/{role_name}" + expected_credentials = { + "aws_access_key_id": access_key or "test-access-key-id", + "aws_secret_access_key": secret_key or "test-secret-access-key", + "aws_session_token": "test-session-token", + } + create_client_kwargs = {} + if region: + create_client_kwargs["region_name"] = region + expected_credentials["region_name"] = region + if access_key 
and secret_key: + create_client_kwargs["aws_access_key_id"] = access_key + create_client_kwargs["aws_secret_access_key"] = secret_key + + # Mocks + mock_get_session = self.mocker.patch( + "censys.cloud_connectors.aws_connector.credentials.get_session", + new_callable=asynctest.MagicMock(), + ) + mock_session = mock_get_session.return_value + mock_create_client = mock_session.create_client + mock_client = mock_create_client.return_value.__aenter__.return_value + mock_client.assume_role = asynctest.CoroutineMock( + return_value={ + "Credentials": { + "AccessKeyId": access_key or "test-access-key-id", + "SecretAccessKey": secret_key or "test-secret-access-key", + "SessionToken": "test-session-token", + } + } + ) + mock_assume_role = mock_client.assume_role + + # Actual call + assumed_credentials = await assume_role( + account_number=account_number, + role_name=role_name, + role_session_name=role_session_name, + access_key=access_key, + secret_key=secret_key, + region=region, + ) + + # Assertions + mock_get_session.assert_called_once_with() + mock_create_client.assert_called_once_with( + "sts", + **create_client_kwargs, + ) + mock_assume_role.assert_awaited_once_with( + RoleArn=role_arn, + RoleSessionName=role_session_name, + ) + assert assumed_credentials == expected_credentials diff --git a/tests/test_aws_settings.py b/tests/test_aws_settings.py index a8378be..cc73a72 100644 --- a/tests/test_aws_settings.py +++ b/tests/test_aws_settings.py @@ -62,137 +62,39 @@ def get_settings_file(self, file_name) -> list[AwsSpecificSettings]: settings: list[AwsSpecificSettings] = list(provider_settings.values()) # type: ignore return settings - def get_credentials(self, file_name) -> list[dict]: - """Get the AWS credential data from a test providers.yml file. - - Args: - file_name (str): Filename. - - Returns: - list[dict]: Credentials. 
- """ - settings = self.get_settings_file(file_name) - setting = settings[0] # type: ignore - return list(setting.get_credentials()) - def test_missing_role_and_access_key(self): with pytest.raises(ValueError, match="Specify either access_key"): - AwsSpecificSettings( + AwsSpecificSettings( # type: ignore[call-arg] account_number="123123123123", regions=["us-east-1"], ) - def test_primary_get_credentials(self): - settings = self.get_settings_file("primary_access_key.yml") - setting = settings[0] - credentials = list(setting.get_credentials()) - credential = credentials[0] - assert len(settings) == 1 - assert len(credentials) == 1 - assert setting.regions == ["test-region"] - assert credential["account_number"] == "111111111111" - - def test_parent_key_child_role_loads_parent_key(self): - credential = self.get_credentials("accounts_parent_key_child_role.yml")[0] - assert credential["access_key"] == "example-access-key-1" - assert credential["secret_key"] == "example-secret-key-1" - - def test_parent_key_child_role_loads_child_role(self): - credential = self.get_credentials("accounts_parent_key_child_role.yml")[1] - assert credential["role_name"] == "example-role-2" - assert credential["account_number"] == "111111111112" - - def test_parent_account_with_access_key(self): - credential = self.get_credentials("accounts_key.yml")[0] - assert credential["access_key"] == "example-access-key-1" - assert credential["secret_key"] == "example-secret-key-1" - - def test_child_account_with_access_key(self): - credential = self.get_credentials("accounts_key.yml")[1] - assert credential["access_key"] == "example-access-key-2" - assert credential["secret_key"] == "example-secret-key-2" - - def test_ecs_parent_account_with_role(self): - credential = self.get_credentials("ecs.yml")[0] - assert credential["role_name"] == "example-role-1" - assert credential["role_session_name"] == "censys-cloud-connector" - - def test_ecs_child_account_with_role(self): - credential = self.get_credentials("ecs.yml")[1] - assert credential["role_name"] == "example-role-2" - assert credential["role_session_name"] == "censys-cloud-connector" - - def test_accounts_minimum_required_fields(self): - settings = self.get_settings_file("accounts_inherit.yml") - setting = settings[0] - credentials = list(setting.get_credentials()) - assert len(settings) == 1 - assert len(credentials) == 3 - - def test_accounts_get_credentials_enumerates_all(self): - setting = self.get_settings_file("accounts_inherit.yml")[0] - for cred in setting.get_credentials(): - assert cred["account_number"] in [ - "111111111111", - "111111111112", - "111111111113", - ] - - def test_accounts_inherit_from_primary(self): - expected = { - "account_number": "111111111112", - "access_key": None, - "secret_key": None, - "role_name": "test-primary-role-name", - "role_session_name": "test-primary-role-session-name", - "ignore_tags": ["test-primary-ignore-tag-1"], - } - credential = self.get_credentials("accounts_inherit.yml")[1] - assert credential == expected - - def test_accounts_override_primary_values(self): - expected = { - "account_number": "111111111112", - "access_key": None, - "secret_key": None, - "role_name": "test-override-role", - "role_session_name": "test-override-session-name", - "ignore_tags": ["test-override-ignore-tag-1"], - } - credential = self.get_credentials("accounts_override.yml")[1] - assert credential == expected - - def test_accounts_do_not_inherit_keys(self): - credential = self.get_credentials("accounts_override.yml")[1] - assert 
credential["access_key"] is None - assert credential["secret_key"] is None - def test_provider_key(self): account = "123123123123" expected = (account,) settings = self.aws_settings({"account_number": account}) assert settings.get_provider_key() == expected - def test_ignore_tags_provider(self): - expected = ["test-provider-ignore-tag"] - settings = self.aws_settings( - { - "ignore_tags": expected, - } - ) - creds = next(settings.get_credentials()) - assert creds["ignore_tags"] == expected - - def test_ignore_tags_account_overrides_provider(self): - child = { - "account_number": "123123123123", - "ignore_tags": ["test-account-ignore-tag"], - } - primary = { - "ignore_tags": ["test-primary-ignore-tag"], - "accounts": [child], - } - settings = self.aws_settings(primary) - creds = list(settings.get_credentials()) - assert creds[0]["ignore_tags"] == primary["ignore_tags"] - assert creds[1]["ignore_tags"] == child["ignore_tags"] + # def test_ignore_tags_provider(self): + # expected = ["test-provider-ignore-tag"] + # settings = self.aws_settings( + # { + # "ignore_tags": expected, + # } + # ) + # creds = next(settings.get_credentials()) + # assert creds["ignore_tags"] == expected + + # def test_ignore_tags_account_overrides_provider(self): + # child = { + # "account_number": "123123123123", + # "ignore_tags": ["test-account-ignore-tag"], + # } + # primary = { + # "ignore_tags": ["test-primary-ignore-tag"], + # "accounts": [child], + # } + # settings = self.aws_settings(primary) + # creds = list(settings.get_credentials()) + # assert creds[0]["ignore_tags"] == primary["ignore_tags"] + # assert creds[1]["ignore_tags"] == child["ignore_tags"] diff --git a/tests/test_azure_connector.py b/tests/test_azure_connector.py index 0ed39f6..b30a65c 100644 --- a/tests/test_azure_connector.py +++ b/tests/test_azure_connector.py @@ -1,28 +1,27 @@ +import asyncio import json -from unittest import TestCase from unittest.mock import MagicMock +import asynctest import pytest +from asynctest import TestCase +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError +from azure.storage.blob import ContainerProperties +from azure.storage.blob.aio import BlobServiceClient from parameterized import parameterized +from censys.cloud_connectors.azure_connector import AzureCloudConnector from censys.cloud_connectors.azure_connector.enums import AzureResourceTypes +from censys.cloud_connectors.azure_connector.settings import AzureSpecificSettings from censys.cloud_connectors.common.enums import ProviderEnum from tests.base_connector_case import BaseConnectorCase -failed_import = False -try: - from azure.core.exceptions import ClientAuthenticationError, HttpResponseError - from censys.cloud_connectors.azure_connector import AzureCloudConnector - from censys.cloud_connectors.azure_connector.settings import AzureSpecificSettings -except ImportError: - failed_import = True - - -@pytest.mark.skipif(failed_import, reason="Azure SDK not installed") class TestAzureCloudConnector(BaseConnectorCase, TestCase): connector: AzureCloudConnector connector_cls = AzureCloudConnector + test_subscription_id: str + test_credentials: MagicMock def setUp(self) -> None: super().setUp() @@ -34,21 +33,27 @@ def setUp(self) -> None: } self.connector = AzureCloudConnector(self.settings) # Set subscription_id as its required for certain calls - self.connector.subscription_id = self.data["TEST_CREDS"]["subscription_id"] - self.connector.credentials = self.mocker.MagicMock() + self.test_subscription_id = 
self.data["TEST_CREDS"]["subscription_id"] + self.test_credentials = self.mocker.MagicMock() self.connector.provider_settings = test_azure_settings def mock_asset(self, data: dict) -> MagicMock: asset = self.mocker.MagicMock() for key, value in data.items(): - asset.__setattr__(key, value) + if isinstance(value, dict): + setattr(asset, key, self.mock_asset(value)) + else: + setattr(asset, key, value) asset.as_dict.return_value = data return asset def mock_client(self, client_name: str) -> MagicMock: - return self.mocker.patch( - f"censys.cloud_connectors.azure_connector.connector.{client_name}" + mock = self.mocker.patch( + f"censys.cloud_connectors.azure_connector.connector.{client_name}", + new_callable=asynctest.MagicMock, ) + mock.return_value.close = asynctest.CoroutineMock() + return mock def mock_healthcheck(self) -> MagicMock: """Mock the healthcheck. @@ -61,7 +66,7 @@ def mock_healthcheck(self) -> MagicMock: ) @parameterized.expand([(ClientAuthenticationError,)]) - def test_scan_fail(self, exception): + async def test_scan_fail(self, exception): # Mock super().scan() mock_scan = self.mocker.patch.object( self.connector.__class__.__bases__[0], @@ -72,13 +77,17 @@ def test_scan_fail(self, exception): # Actual call with pytest.raises(exception): - self.connector.scan() + await self.connector.scan( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + ) # Assertions mock_scan.assert_called_once() self.assert_healthcheck_called(mock_healthcheck) - def test_scan_all(self): + async def test_scan_all(self): # Test data test_single_subscription = self.data["TEST_CREDS"] test_multiple_subscriptions = test_single_subscription.copy() @@ -96,13 +105,13 @@ def test_scan_all(self): provider_settings: dict[tuple, AzureSpecificSettings] = { p.get_provider_key(): p for p in test_azure_settings } - self.connector.settings.providers[self.connector.provider] = provider_settings + self.connector.settings.providers[self.connector.provider] = provider_settings # type: ignore[arg-type] # Mock scan mock_scan = self.mocker.patch.object(self.connector, "scan") # Actual call - self.connector.scan_all() + await self.connector.scan_all() # Assertions assert mock_scan.call_count == 3 @@ -113,10 +122,10 @@ def test_format_label(self): test_asset = self.mock_asset({"location": test_location}) # Actual call - label = self.connector.format_label(test_asset) + label = self.connector.format_label(test_asset, self.test_subscription_id) # Assertions - assert label == f"AZURE: {self.connector.subscription_id}/{test_location}" + assert label == f"AZURE: {self.test_subscription_id}/{test_location}" def test_format_label_no_location(self): # Test data @@ -125,21 +134,24 @@ def test_format_label_no_location(self): # Actual call with pytest.raises(ValueError, match="Asset has no location"): - self.connector.format_label(test_asset) + self.connector.format_label(test_asset, self.test_subscription_id) - def test_get_seeds(self): + async def test_get_seeds(self): # Test data self.connector.provider_settings = AzureSpecificSettings.from_dict( self.data["TEST_CREDS"] ) - seed_scanners = { - AzureResourceTypes.PUBLIC_IP_ADDRESSES: self.mocker.Mock(), - AzureResourceTypes.CONTAINER_GROUPS: self.mocker.Mock(), - AzureResourceTypes.SQL_SERVERS: self.mocker.Mock(), - AzureResourceTypes.DNS_ZONES: self.mocker.Mock(), - } # Mock + seed_scanners = { + AzureResourceTypes.PUBLIC_IP_ADDRESSES: asynctest.MagicMock(), + AzureResourceTypes.CONTAINER_GROUPS: asynctest.MagicMock(), + 
AzureResourceTypes.SQL_SERVERS: asynctest.MagicMock(), + AzureResourceTypes.DNS_ZONES: asynctest.MagicMock(), + } + for scanner in seed_scanners.values(): + scanner.return_value = asyncio.Future() + scanner.return_value.set_result(None) self.mocker.patch.object( self.connector, "seed_scanners", @@ -147,19 +159,21 @@ def test_get_seeds(self): ) # Actual call - self.connector.get_seeds() + await self.connector.get_seeds(self.connector.provider_settings) # Assertions for mock in self.connector.seed_scanners.values(): mock.assert_called_once() - def test_get_seeds_ignore(self): + async def test_get_seeds_ignore(self): # Test data self.connector.provider_settings = AzureSpecificSettings.from_dict( self.data["TEST_CREDS_IGNORE"] ) + + # Mock seed_scanners = { - resource_type: self.mocker.Mock() + resource_type: asynctest.MagicMock() for resource_type in [ AzureResourceTypes.PUBLIC_IP_ADDRESSES, AzureResourceTypes.CONTAINER_GROUPS, @@ -167,8 +181,9 @@ def test_get_seeds_ignore(self): AzureResourceTypes.DNS_ZONES, ] } - - # Mock + for scanner in seed_scanners.values(): + scanner.return_value = asyncio.Future() + scanner.return_value.set_result(None) self.mocker.patch.object( self.connector, "seed_scanners", @@ -176,16 +191,19 @@ def test_get_seeds_ignore(self): ) # Actual call - self.connector.get_seeds() + await self.connector.get_seeds(self.connector.provider_settings) # Assertions for resource_type, mock in self.connector.seed_scanners.items(): - if resource_type in self.connector.provider_settings.ignore: + if ( + self.connector.provider_settings.ignore + and resource_type in self.connector.provider_settings.ignore + ): mock.assert_not_called() else: mock.assert_called_once() - def test_get_ip_addresses(self): + async def test_get_ip_addresses(self): # Test data test_list_all_response = [] test_seed_values = [] @@ -195,28 +213,36 @@ def test_get_ip_addresses(self): test_ip_response["ip_address"] = ip_address test_seed_values.append(ip_address) test_list_all_response.append(self.mock_asset(test_ip_response)) - test_label = self.connector.format_label(test_list_all_response[0]) + test_label = self.connector.format_label( + test_list_all_response[0], self.test_subscription_id + ) # Mock list_all mock_network_client = self.mock_client("NetworkManagementClient") - mock_public_ips = self.mocker.patch.object( - mock_network_client.return_value, "public_ip_addresses" + mock_public_ips_list_all = asynctest.MagicMock() + mock_public_ips_list_all.__aiter__.return_value = test_list_all_response + mock_network_client.return_value.public_ip_addresses.list_all.return_value = ( + mock_public_ips_list_all ) - mock_public_ips.list_all.return_value = test_list_all_response # Actual call - self.connector.get_ip_addresses() + await self.connector.get_ip_addresses( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.PUBLIC_IP_ADDRESSES, + ) # Assertions mock_network_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) - mock_public_ips.list_all.assert_called_once() + mock_public_ips_list_all.__aiter__.assert_called_once() self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_clusters(self): + async def test_get_clusters(self): # Test data test_list_response = [] test_seed_values = [] @@ -232,28 +258,36 @@ def test_get_clusters(self): test_seed_values.append(domain) test_container_response["ip_address"] 
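The Azure test changes here replace synchronous attribute patches with async-iterator mocks: the aio management clients return async pageables that the connector consumes with async for, and the tests emulate that by assigning a plain list to a mock's __aiter__.return_value. A small self-contained version of the same pattern, using unittest.mock instead of asynctest and an invented collect_ip_addresses helper as the consumer:

import asyncio
from unittest import mock


async def collect_ip_addresses(network_client) -> list:
    # The aio management clients return async pageables, so results are
    # gathered with `async for` rather than a plain list() call.
    addresses = []
    async for public_ip in network_client.public_ip_addresses.list_all():
        addresses.append(public_ip.ip_address)
    return addresses


async def main() -> None:
    # MagicMock supports __aiter__, so a plain list can stand in for the pager.
    fake_ip = mock.MagicMock(ip_address="203.0.113.10")
    pager = mock.MagicMock()
    pager.__aiter__.return_value = [fake_ip]

    network_client = mock.MagicMock()
    network_client.public_ip_addresses.list_all.return_value = pager

    assert await collect_ip_addresses(network_client) == ["203.0.113.10"]


asyncio.run(main())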
= ip_address_copy test_list_response.append(self.mock_asset(test_container_response)) - test_label = self.connector.format_label(test_list_response[0]) + test_label = self.connector.format_label( + test_list_response[0], self.test_subscription_id + ) # Mock list mock_container_client = self.mock_client("ContainerInstanceManagementClient") - mock_container_groups = self.mocker.patch.object( - mock_container_client.return_value, "container_groups" + mock_container_groups_list = asynctest.MagicMock() + mock_container_groups_list.__aiter__.return_value = test_list_response + mock_container_client.return_value.container_groups.list.return_value = ( + mock_container_groups_list ) - mock_container_groups.list.return_value = test_list_response # Actual call - self.connector.get_clusters() + await self.connector.get_clusters( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.CONTAINER_GROUPS, + ) # Assertions mock_container_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) - mock_container_groups.list.assert_called_once() + mock_container_groups_list.__aiter__.assert_called_once() self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_sql_servers(self): + async def test_get_sql_servers(self): # Test data test_list_response = [] test_seed_values = [] @@ -263,29 +297,39 @@ def test_get_sql_servers(self): test_server_response["fully_qualified_domain_name"] = domain test_seed_values.append(domain) test_list_response.append(self.mock_asset(test_server_response)) - test_label = self.connector.format_label(test_list_response[0]) + test_label = self.connector.format_label( + test_list_response[0], self.test_subscription_id + ) # Mock list mock_sql_client = self.mock_client("SqlManagementClient") - mock_servers = self.mocker.patch.object(mock_sql_client.return_value, "servers") - mock_servers.list.return_value = test_list_response + mock_servers_list = asynctest.MagicMock() + mock_servers_list.__aiter__.return_value = test_list_response + mock_sql_client.return_value.servers.list.return_value = mock_servers_list # Actual call - self.connector.get_sql_servers() + await self.connector.get_sql_servers( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.SQL_SERVERS, + ) # Assertions mock_sql_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) - mock_servers.list.assert_called_once() + mock_servers_list.__aiter__.assert_called_once() self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_dns_records(self): + async def test_get_dns_records(self): # Test data test_zones = [self.mock_asset(self.data["TEST_DNS_ZONE"])] - test_label = self.connector.format_label(test_zones[0]) + test_label = self.connector.format_label( + test_zones[0], self.test_subscription_id + ) test_list_records = [] test_seed_values = [] for data_key in [ @@ -306,26 +350,33 @@ def test_get_dns_records(self): # Mock list mock_dns_client = self.mock_client("DnsManagementClient") - mock_zones = self.mocker.patch.object(mock_dns_client.return_value, "zones") - mock_zones.list.return_value = test_zones - mock_records = self.mocker.patch.object( - mock_dns_client.return_value, "record_sets" + mock_zones_list = 
asynctest.MagicMock() + mock_zones_list.__aiter__.return_value = test_zones + mock_dns_client.return_value.zones.list.return_value = mock_zones_list + mock_record_sets = asynctest.MagicMock() + mock_record_sets.__aiter__.return_value = test_list_records + mock_dns_client.return_value.record_sets.list_all_by_dns_zone.return_value = ( + mock_record_sets ) - mock_records.list_all_by_dns_zone.return_value = test_list_records # Actual call - self.connector.get_dns_records() + await self.connector.get_dns_records( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.DNS_ZONES, + ) # Assertions mock_dns_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) - mock_records.list_all_by_dns_zone.assert_called_once() + mock_zones_list.__aiter__.assert_called_once() self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_dns_records_fail(self): + async def test_get_dns_records_fail(self): # Mock list mock_dns_client = self.mock_client("DnsManagementClient") mock_zones = self.mocker.patch.object(mock_dns_client.return_value, "zones") @@ -333,11 +384,16 @@ def test_get_dns_records_fail(self): mock_error_logger = self.mocker.patch.object(self.connector.logger, "error") # Actual call - self.connector.get_dns_records() + await self.connector.get_dns_records( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.DNS_ZONES, + ) # Assertions mock_dns_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) mock_zones.list.assert_called_once() mock_error_logger.assert_called_once() @@ -345,16 +401,19 @@ def test_get_dns_records_fail(self): "Failed to get Azure DNS records" ) - def test_get_cloud_assets(self): + async def test_get_cloud_assets(self): # Test data self.connector.provider_settings = AzureSpecificSettings.from_dict( self.data["TEST_CREDS"] ) + + # Mock cloud_asset_scanners = { AzureResourceTypes.STORAGE_ACCOUNTS: self.mocker.Mock(), } - - # Mock + for scanner in cloud_asset_scanners.values(): + scanner.return_value = asyncio.Future() + scanner.return_value.set_result(None) self.mocker.patch.object( self.connector, "cloud_asset_scanners", @@ -362,13 +421,13 @@ def test_get_cloud_assets(self): ) # Actual call - self.connector.get_cloud_assets() + await self.connector.get_cloud_assets(self.connector.provider_settings) # Assertions for mock in cloud_asset_scanners.values(): mock.assert_called_once() - def test_get_cloud_assets_ignore(self): + async def test_get_cloud_assets_ignore(self): # Test data self.connector.provider_settings = AzureSpecificSettings.from_dict( self.data["TEST_CREDS_IGNORE"] @@ -380,12 +439,12 @@ def test_get_cloud_assets_ignore(self): ) # Actual call - self.connector.get_cloud_assets() + await self.connector.get_cloud_assets(self.connector.provider_settings) # Assertions mock_storage_container.assert_not_called() - def test_get_storage_containers(self): + async def test_get_storage_containers(self): # Test data test_storage_accounts = [] test_containers = [] @@ -402,35 +461,56 @@ def test_get_storage_containers(self): test_container = self.data["TEST_STORAGE_CONTAINER"].copy() test_container["name"] = f"test-{i}" test_containers.append(self.mock_asset(test_container)) - test_label = 
self.connector.format_label(test_storage_accounts[0]) + test_label = self.connector.format_label( + test_storage_accounts[0], self.test_subscription_id + ) # Mock list mock_storage_client = self.mock_client("StorageManagementClient") - mock_storage_accounts = self.mocker.patch.object( - mock_storage_client.return_value, "storage_accounts" - ) - mock_storage_accounts.list.return_value = test_storage_accounts + mock_storage_client_iter = asynctest.MagicMock() + mock_storage_client_iter.__aiter__.return_value = test_storage_accounts + mock_storage_client_iter = ( + mock_storage_client.return_value.storage_accounts.list.return_value + ) = mock_storage_client_iter # Mock list containers mock_blob_client = self.mock_client("BlobServiceClient") - mock_blob_client.return_value.list_containers.return_value = test_containers + mock_blob_client.return_value.list_containers.return_value.__aiter__.return_value = ( + test_containers + ) + mock_get_storage_container_url = asynctest.CoroutineMock() - def get_container_with_url(container): - container.url = f"https://{container.name}.blob.core.windows.net" - return container + def get_container_with_url( + _: BlobServiceClient, container: ContainerProperties + ) -> str: + return f"https://{container.name}.blob.core.windows.net" - mock_blob_client.return_value.get_container_client.side_effect = ( - get_container_with_url + mock_get_storage_container_url.side_effect = get_container_with_url + self.mocker.patch.object( + self.connector, + "get_storage_container_url", + new_callable=self.mocker.PropertyMock( + return_value=mock_get_storage_container_url + ), ) # Actual call - self.connector.get_storage_containers() + await self.connector.get_storage_containers( + self.connector.provider_settings, # type: ignore[arg-type] + self.test_credentials, + self.test_subscription_id, + AzureResourceTypes.STORAGE_ACCOUNTS, + ) # Assertions mock_storage_client.assert_called_with( - self.connector.credentials, self.connector.subscription_id + self.test_credentials, self.test_subscription_id ) assert mock_blob_client.call_count == len(test_storage_accounts) + assert ( + mock_blob_client.return_value.list_containers.return_value.__aiter__.call_count + == len(test_storage_accounts) + ) self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) diff --git a/tests/test_cli.py b/tests/test_cli.py index 2139042..2d0154b 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,6 +1,7 @@ -from unittest import TestCase +import asyncio import pytest +from asynctest import TestCase from parameterized import parameterized from censys.cloud_connectors.common.cli import main @@ -95,7 +96,7 @@ def test_cli_config_provider_option(self): class TestScanCli(BaseCase, TestCase): - def test_cli_scan(self): + async def test_cli_scan(self): # Mock mock_args = self.mocker.MagicMock() mock_args.provider = None @@ -105,10 +106,11 @@ def test_cli_scan(self): "censys.cloud_connectors.common.cli.commands.scan.Settings" ) mock_settings.return_value.read_providers_config_file = self.mocker.Mock() - mock_settings.return_value.scan_all = self.mocker.Mock() + mock_settings.return_value.scan_all.return_value = asyncio.Future() + mock_settings.return_value.scan_all.return_value.set_result(None) # Actual call - scan.cli_scan(mock_args) + await scan.cli_scan(mock_args) # Assertions mock_settings.return_value.read_providers_config_file.assert_called_once_with( @@ -116,7 +118,7 @@ def test_cli_scan(self): ) mock_settings.return_value.scan_all.assert_called_once() - def 
test_cli_scan_provider_option(self): + async def test_cli_scan_provider_option(self): # Mock mock_args = self.mocker.MagicMock() mock_args.provider = [ProviderEnum.AZURE] @@ -126,10 +128,11 @@ def test_cli_scan_provider_option(self): "censys.cloud_connectors.common.cli.commands.scan.Settings" ) mock_settings.return_value.read_providers_config_file = self.mocker.Mock() - mock_settings.return_value.scan_all = self.mocker.Mock() + mock_settings.return_value.scan_all.return_value = asyncio.Future() + mock_settings.return_value.scan_all.return_value.set_result(None) # Actual call - scan.cli_scan(mock_args) + await scan.cli_scan(mock_args) # Assertions mock_settings.return_value.read_providers_config_file.assert_called_once_with( diff --git a/tests/test_connector.py b/tests/test_connector.py index 3372656..622ae11 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,6 +1,5 @@ -from unittest import TestCase - import pytest +from asynctest import TestCase from censys.common.exceptions import CensysAsmException, CensysException @@ -8,21 +7,25 @@ from censys.cloud_connectors.common.connector import CloudConnector from censys.cloud_connectors.common.enums import ProviderEnum from censys.cloud_connectors.common.seed import Seed -from censys.cloud_connectors.common.settings import Settings +from censys.cloud_connectors.common.settings import ProviderSpecificSettings, Settings from tests.base_connector_case import BaseConnectorCase class ExampleCloudConnector(CloudConnector): provider = ProviderEnum.GCP - def get_seeds(self): - return super().get_seeds() + async def get_seeds( + self, provider_specific_settings: ProviderSpecificSettings + ) -> None: + return await super().get_seeds(provider_specific_settings) - def get_cloud_assets(self) -> None: - return super().get_cloud_assets() + async def get_cloud_assets( + self, provider_specific_settings: ProviderSpecificSettings + ) -> None: + return await super().get_cloud_assets(provider_specific_settings) - def scan_all(self): - return super().scan_all() + async def scan_all(self): + return await super().scan_all() class TestCloudConnector(BaseConnectorCase, TestCase): @@ -72,19 +75,19 @@ def test_add_cloud_asset(self): assert len(self.connector.cloud_assets[test_uid]) == 1 assert self.connector.cloud_assets[test_uid].pop() == asset - def test_submit_seeds(self): + async def test_submit_seeds(self): seed = Seed(type="TEST", value="test-value", label="test-label") self.connector.add_seed(seed) replace_seeds_mock = self.mocker.patch.object( self.connector.seeds_api, "replace_seeds_by_label" ) - self.connector.submit_seeds() + await self.connector.submit_seeds() replace_seeds_mock.assert_called_once_with( self.connector.label_prefix + "test-label", [seed.to_dict()], ) - def test_fail_submit_seeds(self): + async def test_fail_submit_seeds(self): seed = Seed(type="TEST", value="test-value", label="test-label") self.connector.add_seed(seed) replace_seeds_mock = self.mocker.patch.object( @@ -92,10 +95,10 @@ def test_fail_submit_seeds(self): ) replace_seeds_mock.side_effect = CensysAsmException(404, "Test Exception") logger_mock = self.mocker.patch.object(self.connector.logger, "error") - self.connector.submit_seeds() + await self.connector.submit_seeds() logger_mock.assert_called_once() - def test_submit_cloud_assets(self): + async def test_submit_cloud_assets(self): # Test data asset = CloudAsset( type="TEST", value="test-value", csp_label=ProviderEnum.GCP, uid="test-uid" @@ -106,7 +109,7 @@ def test_submit_cloud_assets(self): add_cloud_mock = 
self.mocker.patch.object(self.connector, "_add_cloud_assets") # Actual call - self.connector.submit_cloud_assets() + await self.connector.submit_cloud_assets() # Assertions add_cloud_mock.assert_called_once_with( @@ -116,7 +119,7 @@ def test_submit_cloud_assets(self): } ) - def test_fail_submit_cloud_assets(self): + async def test_fail_submit_cloud_assets(self): # Test data asset = CloudAsset( type="TEST", value="test-value", csp_label=ProviderEnum.GCP, uid="test-uid" @@ -129,12 +132,12 @@ def test_fail_submit_cloud_assets(self): logger_mock = self.mocker.patch.object(self.connector.logger, "error") # Actual call - self.connector.submit_cloud_assets() + await self.connector.submit_cloud_assets() # Assertions logger_mock.assert_called_once() - def test_add_cloud_assets(self): + async def test_add_cloud_assets(self): # Test data test_data = { "cloudConnectorUid": "test-uid", @@ -145,14 +148,14 @@ def test_add_cloud_assets(self): post_mock.return_value.json.return_value = {"status": "success"} # Actual call - self.connector._add_cloud_assets(test_data) + await self.connector._add_cloud_assets(test_data) # Assertions post_mock.assert_called_once_with( self.connector._add_cloud_asset_path, json=test_data ) - def test_submit(self): + async def test_submit(self): # Mock submit_seeds_mock = self.mocker.patch.object(self.connector, "submit_seeds") submit_cloud_assets_mock = self.mocker.patch.object( @@ -161,13 +164,13 @@ def test_submit(self): self.mocker.patch.object(self.connector.settings, "dry_run", False) # Actual call - self.connector.submit() + await self.connector.submit() # Assertions submit_seeds_mock.assert_called_once() submit_cloud_assets_mock.assert_called_once() - def test_submit_dry_run(self): + async def test_submit_dry_run(self): # Mock submit_seeds_mock = self.mocker.patch.object(self.connector, "submit_seeds") submit_cloud_assets_mock = self.mocker.patch.object( @@ -176,13 +179,13 @@ def test_submit_dry_run(self): self.mocker.patch.object(self.connector.settings, "dry_run", True) # Actual call - self.connector.submit() + await self.connector.submit() # Assertions submit_seeds_mock.assert_not_called() submit_cloud_assets_mock.assert_not_called() - def test_scan(self): + async def test_scan(self): # Mock get_seeds_mock = self.mocker.patch.object(self.connector, "get_seeds") get_cloud_assets_mock = self.mocker.patch.object( @@ -191,7 +194,7 @@ def test_scan(self): submit_mock = self.mocker.patch.object(self.connector, "submit") # Actual call - self.connector.scan() + await self.connector.scan(None) # Assertions get_seeds_mock.assert_called_once() diff --git a/tests/test_gcp_connector.py b/tests/test_gcp_connector.py index 4c92fec..577b264 100644 --- a/tests/test_gcp_connector.py +++ b/tests/test_gcp_connector.py @@ -1,30 +1,25 @@ +import asyncio import json -from unittest import TestCase from unittest.mock import MagicMock -import pytest +import asynctest +from asynctest import TestCase +from google.cloud.securitycenter_v1.types import ListAssetsResponse from parameterized import parameterized from censys.cloud_connectors.common.enums import ProviderEnum - -# from censys.cloud_connectors.common.seed import Seed from censys.cloud_connectors.common.settings import Settings from censys.cloud_connectors.gcp_connector.connector import GcpCloudConnector from censys.cloud_connectors.gcp_connector.enums import GcpSecurityCenterResourceTypes from censys.cloud_connectors.gcp_connector.settings import GcpSpecificSettings from tests.base_connector_case import BaseConnectorCase -failed_import = 
False -try: - from google.cloud.securitycenter_v1.types import ListAssetsResponse -except ImportError: - failed_import = True - -@pytest.mark.skipif(failed_import, reason="Failed to import gcp dependencies") class TestGcpConnector(BaseConnectorCase, TestCase): connector: GcpCloudConnector connector_cls = GcpCloudConnector + test_organization_id: str + test_credentials: dict def setUp(self) -> None: super().setUp() @@ -44,9 +39,9 @@ def setUp(self) -> None: test_gcp_settings.get_provider_key(): test_gcp_settings } self.connector = GcpCloudConnector(self.settings) - self.connector.organization_id = self.data["TEST_CREDS"]["organization_id"] - self.connector.credentials = self.mocker.MagicMock() self.connector.provider_settings = test_gcp_settings + self.test_organization_id = self.data["TEST_CREDS"]["organization_id"] + self.test_credentials = self.mocker.MagicMock() # def tearDown(self) -> None: # # Reset the deaultdicts as they are immutable @@ -66,7 +61,7 @@ def mock_list_assets_result( Returns: ListAssetsResponse.ListAssetsResult: The test ListAssetsResult object. """ - return ListAssetsResponse.ListAssetsResult.from_json(json.dumps(data)) + return ListAssetsResponse.ListAssetsResult.from_json(json.dumps(data)) # type: ignore def mock_healthcheck(self) -> MagicMock: """Mock the healthcheck. @@ -88,29 +83,25 @@ def test_init(self): assert self.connector.label_prefix == "GCP: " assert self.connector.settings == self.settings - def test_scan(self): + async def test_scan(self): # Mock mock_credentials = self.mocker.patch( "censys.cloud_connectors.gcp_connector.connector.service_account.Credentials.from_service_account_file", ) - mock_sc_client = self.mocker.patch( - "censys.cloud_connectors.gcp_connector.connector.securitycenter_v1.SecurityCenterClient" - ) mock_scan = self.mocker.patch.object( self.connector.__class__.__bases__[0], "scan" ) mock_healthcheck = self.mock_healthcheck() # Actual call - self.connector.scan() + await self.connector.scan(self.connector.provider_settings) # type: ignore[arg-type] # Assertions mock_credentials.assert_called_once() - mock_sc_client.assert_called_once() mock_scan.assert_called_once() self.assert_healthcheck_called(mock_healthcheck) - def test_credentials_fail(self): + async def test_credentials_fail(self): # Mock mock_credentials = self.mocker.patch( "censys.cloud_connectors.gcp_connector.connector.service_account.Credentials.from_service_account_file", @@ -123,7 +114,7 @@ def test_credentials_fail(self): mock_healthcheck = self.mock_healthcheck() # Actual call - self.connector.scan() + await self.connector.scan(self.connector.provider_settings) # type: ignore[arg-type] # Assertions mock_credentials.assert_called_once() @@ -131,7 +122,7 @@ def test_credentials_fail(self): mock_scan.assert_not_called() self.assert_healthcheck_called(mock_healthcheck) - def test_scan_all(self): + async def test_scan_all(self): # Test data test_creds = self.data["TEST_CREDS"].copy() second_test_creds = test_creds.copy() @@ -143,13 +134,13 @@ def test_scan_all(self): provider_settings: dict[tuple, GcpSpecificSettings] = { p.get_provider_key(): p for p in test_gcp_settings } - self.connector.settings.providers[self.connector.provider] = provider_settings + self.connector.settings.providers[self.connector.provider] = provider_settings # type: ignore[arg-type] # Mock mock_scan = self.mocker.patch.object(self.connector, "scan") # Actual call - self.connector.scan_all() + await self.connector.scan_all() # Assertions assert mock_scan.call_count == len(provider_settings) @@ 
-168,26 +159,31 @@ def test_format_label(self, data_key: str): test_result = self.mock_list_assets_result(self.data[data_key]) # Actual call - label = self.connector.format_label(test_result) + label = self.connector.format_label( + self.connector.provider_settings, test_result # type: ignore[arg-type] + ) # Assertions - assert label == f"GCP: {self.connector.organization_id}/censys-cc-test-project" + assert label == f"GCP: {self.test_organization_id}/censys-cc-test-project" @parameterized.expand([("test-filter")]) - def test_list_assets(self, filter: str): + async def test_list_assets(self, filter: str): # Mock - mock_sc_client = self.mocker.patch( + mock_sc_client = asynctest.MagicMock( "censys.cloud_connectors.gcp_connector.connector.securitycenter_v1.SecurityCenterClient" ) - self.connector.security_center_client = mock_sc_client.return_value + mock_sc_client.return_value.list_assets.return_value = asyncio.Future() + mock_sc_client.return_value.list_assets.return_value.set_result(None) # Actual call - self.connector.list_assets(filter) + await self.connector.list_assets( + self.connector.provider_settings, mock_sc_client.return_value, filter # type: ignore[arg-type] + ) # Assertions mock_sc_client.return_value.list_assets.assert_called_once_with( request={ - "parent": f"organizations/{self.connector.organization_id}", + "parent": f"organizations/{self.test_organization_id}", "filter": filter, } ) @@ -226,7 +222,7 @@ def test_get_compute_instances(self): self.connector.seeds[test_label], test_seed_values ) - def test_get_compute_addresses(self): + async def test_get_compute_addresses(self): # Test data test_list_assets_results = [] test_seed_values = [] @@ -239,25 +235,36 @@ def test_get_compute_addresses(self): test_list_assets_results.append( self.mock_list_assets_result(test_asset_result) ) - test_label = self.connector.format_label(test_list_assets_results[0]) + test_label = self.connector.format_label( + self.connector.provider_settings, test_list_assets_results[0] # type: ignore[arg-type] + ) # Mock - mock_list = self.mocker.patch.object( - self.connector, "list_assets", return_value=test_list_assets_results + mock_scc = self.mocker.Mock() + mock_iter = asynctest.MagicMock() + mock_iter.__aiter__.return_value = test_list_assets_results + mock_iter = self.mocker.patch.object( + self.connector, "list_assets", return_value=mock_iter ) # Actual call - self.connector.get_compute_addresses() + await self.connector.get_compute_addresses( + self.connector.provider_settings, # type: ignore[arg-type] + mock_scc, + GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS, + ) # Assertions - mock_list.assert_called_once_with( - filter=GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS.filter() + mock_iter.assert_called_once_with( + self.connector.provider_settings, + mock_scc, + filter=GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS.filter(), ) self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_container_clusters(self): + async def test_get_container_clusters(self): # Test data test_list_assets_results = [] test_seed_values = [] @@ -276,25 +283,36 @@ def test_get_container_clusters(self): test_list_assets_results.append( self.mock_list_assets_result(test_asset_result) ) - test_label = self.connector.format_label(test_list_assets_results[0]) + test_label = self.connector.format_label( + self.connector.provider_settings, test_list_assets_results[0] # type: ignore[arg-type] + ) # Mock - mock_list = self.mocker.patch.object( - self.connector, "list_assets", 
return_value=test_list_assets_results + mock_scc = self.mocker.Mock() + mock_iter = asynctest.MagicMock() + mock_iter.__aiter__.return_value = test_list_assets_results + mock_iter = self.mocker.patch.object( + self.connector, "list_assets", return_value=mock_iter ) # Actual call - self.connector.get_container_clusters() + await self.connector.get_container_clusters( + self.connector.provider_settings, # type: ignore[arg-type] + mock_scc, + GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER, + ) # Assertions - mock_list.assert_called_once_with( - filter=GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER.filter() + mock_iter.assert_called_once_with( + self.connector.provider_settings, + mock_scc, + filter=GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER.filter(), ) self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_cloud_sql_instances(self): + async def test_get_cloud_sql_instances(self): # Test data test_list_assets_results = [] test_seed_values = [] @@ -312,25 +330,36 @@ def test_get_cloud_sql_instances(self): test_list_assets_results.append( self.mock_list_assets_result(test_asset_result) ) - test_label = self.connector.format_label(test_list_assets_results[0]) + test_label = self.connector.format_label( + self.connector.provider_settings, test_list_assets_results[0] # type: ignore[arg-type] + ) # Mock - mock_list = self.mocker.patch.object( - self.connector, "list_assets", return_value=test_list_assets_results + mock_scc = self.mocker.Mock() + mock_iter = asynctest.MagicMock() + mock_iter.__aiter__.return_value = test_list_assets_results + mock_iter = self.mocker.patch.object( + self.connector, "list_assets", return_value=mock_iter ) # Actual call - self.connector.get_cloud_sql_instances() + await self.connector.get_cloud_sql_instances( + self.connector.provider_settings, # type: ignore[arg-type] + mock_scc, + GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE, + ) # Assertions - mock_list.assert_called_once_with( - filter=GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE.filter() + mock_iter.assert_called_once_with( + self.connector.provider_settings, + mock_scc, + filter=GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE.filter(), ) self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_dns_records(self): + async def test_get_dns_records(self): # Test data test_list_assets_results = [] test_seed_values = [] @@ -342,39 +371,49 @@ def test_get_dns_records(self): test_list_assets_results.append( self.mock_list_assets_result(test_asset_result) ) - test_label = self.connector.format_label(test_list_assets_results[0]) + test_label = self.connector.format_label( + self.connector.provider_settings, test_list_assets_results[0] # type: ignore[arg-type] + ) # Mock - mock_list = self.mocker.patch.object( - self.connector, "list_assets", return_value=test_list_assets_results + mock_scc = self.mocker.Mock() + mock_iter = asynctest.MagicMock() + mock_iter.__aiter__.return_value = test_list_assets_results + mock_iter = self.mocker.patch.object( + self.connector, "list_assets", return_value=mock_iter ) # Actual call - self.connector.get_dns_records() + await self.connector.get_dns_records( + self.connector.provider_settings, # type: ignore[arg-type] + mock_scc, + GcpSecurityCenterResourceTypes.DNS_ZONE, + ) # Assertions - mock_list.assert_called_once_with( - filter=GcpSecurityCenterResourceTypes.DNS_ZONE.filter() + mock_iter.assert_called_once_with( + self.connector.provider_settings, + mock_scc, + 
filter=GcpSecurityCenterResourceTypes.DNS_ZONE.filter(), ) self.assert_seeds_with_values( self.connector.seeds[test_label], test_seed_values ) - def test_get_seeds(self): + async def test_get_seeds(self): # Test data self.connector.provider_settings = GcpSpecificSettings.from_dict( self.data["TEST_CREDS"] ) + # Mock seed_scanners = { - GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.DNS_ZONE: self.mocker.Mock(), + GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.DNS_ZONE: asynctest.CoroutineMock(), } - - # Mock self.mocker.patch.object( self.connector, "seed_scanners", @@ -382,27 +421,26 @@ def test_get_seeds(self): ) # Actual call - self.connector.get_seeds() + await self.connector.get_seeds(self.connector.provider_settings) # Assertions for mock in self.connector.seed_scanners.values(): mock.assert_called_once() - def test_get_seeds_ignore(self): + async def test_get_seeds_ignore(self): # Test data self.connector.provider_settings = GcpSpecificSettings.from_dict( self.data["TEST_CREDS_IGNORE"] ) + # Mock seed_scanners = { - GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE: self.mocker.Mock(), - GcpSecurityCenterResourceTypes.DNS_ZONE: self.mocker.Mock(), + GcpSecurityCenterResourceTypes.COMPUTE_INSTANCE: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.COMPUTE_ADDRESS: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.CONTAINER_CLUSTER: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.CLOUD_SQL_INSTANCE: asynctest.CoroutineMock(), + GcpSecurityCenterResourceTypes.DNS_ZONE: asynctest.CoroutineMock(), } - - # Mock self.mocker.patch.object( self.connector, "seed_scanners", @@ -410,16 +448,16 @@ def test_get_seeds_ignore(self): ) # Actual call - self.connector.get_seeds() + await self.connector.get_seeds(self.connector.provider_settings) # Assertions for resource_type, mock in self.connector.seed_scanners.items(): - if resource_type in self.connector.provider_settings.ignore: + if resource_type in self.connector.provider_settings.ignore: # type: ignore mock.assert_not_called() else: mock.assert_called_once() - def test_get_storage_buckets(self): + async def test_get_storage_buckets(self): # Test data test_list_assets_results = [] test_buckets = [] @@ -431,19 +469,30 @@ def test_get_storage_buckets(self): test_list_assets_results.append( self.mock_list_assets_result(test_asset_result) ) - test_label = self.connector.format_label(test_list_assets_results[0]) + test_label = self.connector.format_label( + self.connector.provider_settings, test_list_assets_results[0] # type: ignore[arg-type] + ) # Mock - mock_list = self.mocker.patch.object( - self.connector, "list_assets", return_value=test_list_assets_results + mock_scc = self.mocker.Mock() + mock_iter = asynctest.MagicMock() + 
mock_iter.__aiter__.return_value = test_list_assets_results + mock_iter = self.mocker.patch.object( + self.connector, "list_assets", return_value=mock_iter ) # Actual call - self.connector.get_storage_buckets() + await self.connector.get_storage_buckets( + self.connector.provider_settings, # type: ignore[arg-type] + mock_scc, + GcpSecurityCenterResourceTypes.STORAGE_BUCKET, + ) # Assertions - mock_list.assert_called_once_with( - filter=GcpSecurityCenterResourceTypes.STORAGE_BUCKET.filter() + mock_iter.assert_called_once_with( + self.connector.provider_settings, + mock_scc, + filter=GcpSecurityCenterResourceTypes.STORAGE_BUCKET.filter(), ) assert len(self.connector.cloud_assets[test_label]) == len(test_buckets) for bucket in self.connector.cloud_assets[test_label]: @@ -454,16 +503,16 @@ def test_get_storage_buckets(self): ) assert "accountNumber" in bucket.scan_data - def test_get_cloud_assets(self): + async def test_get_cloud_assets(self): # Test data self.connector.provider_settings = GcpSpecificSettings.from_dict( self.data["TEST_CREDS"] ) - cloud_asset_scanners = { - GcpSecurityCenterResourceTypes.STORAGE_BUCKET: self.mocker.Mock(), - } # Mock + cloud_asset_scanners = { + GcpSecurityCenterResourceTypes.STORAGE_BUCKET: asynctest.CoroutineMock(), + } self.mocker.patch.object( self.connector, "cloud_asset_scanners", @@ -471,13 +520,13 @@ def test_get_cloud_assets(self): ) # Actual call - self.connector.get_cloud_assets() + await self.connector.get_cloud_assets(self.connector.provider_settings) # Assertions for mock in cloud_asset_scanners.values(): mock.assert_called_once() - def test_get_cloud_assets_ignore(self): + async def test_get_cloud_assets_ignore(self): # Test data self.connector.provider_settings = GcpSpecificSettings.from_dict( self.data["TEST_CREDS_IGNORE"] @@ -490,7 +539,7 @@ def test_get_cloud_assets_ignore(self): ) # Actual call - self.connector.get_cloud_assets() + await self.connector.get_cloud_assets(self.connector.provider_settings) # Assertions mock_storage_bucket.assert_not_called() diff --git a/tests/test_settings.py b/tests/test_settings.py index e76973c..a4dbed0 100644 --- a/tests/test_settings.py +++ b/tests/test_settings.py @@ -1,8 +1,10 @@ +import asyncio from collections import OrderedDict from tempfile import NamedTemporaryFile -from unittest import TestCase +import asynctest import pytest +from asynctest import TestCase from parameterized import parameterized from censys.cloud_connectors.common.enums import ProviderEnum @@ -83,16 +85,17 @@ def test_write_providers_config_file(self, file_name): assert_same_yaml(original_file, temp_file.name) @parameterized.expand(list(ProviderEnum)) - def test_scan_all(self, provider: ProviderEnum): + async def test_scan_all(self, provider: ProviderEnum): self.settings.providers[provider] = {} - mock_connector = self.mocker.MagicMock() - mock_connector().scan_all.return_value = [] + mock_connector = asynctest.MagicMock() + mock_connector().scan_all.return_value = asyncio.Future() + mock_connector().scan_all.return_value.set_result([]) mock_provider = self.mocker.MagicMock() mock_provider.__connector__ = mock_connector mock_import_module = self.mocker.patch( "importlib.import_module", return_value=mock_provider ) - self.settings.scan_all() + await self.settings.scan_all() mock_import_module.assert_called_once_with(provider.module_path()) mock_connector().scan_all.assert_called_once() From 31bb73c7aeace2a9984035c3b7f9ad4f7221827a Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Mon, 26 Jun 2023 11:30:22 -0400 Subject: [PATCH 
07/19] fix: typing issues with connector --- .pre-commit-config.yaml | 8 +++---- pyproject.toml | 2 ++ .../aws_connector/connector.py | 23 +++++++++---------- .../aws_connector/provider_setup.py | 7 +++--- .../azure_connector/connector.py | 8 +++---- .../azure_connector/provider_setup.py | 2 +- .../common/cli/provider_setup.py | 2 +- .../gcp_connector/connector.py | 12 ++++++---- .../gcp_connector/provider_setup.py | 2 +- tests/test_aws_connector.py | 11 +++------ 10 files changed, 37 insertions(+), 40 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0d726cc..e2a8aa5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ # fail_fast: true # Uncomment to exit on first error repos: - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.78.0 + rev: v1.81.0 hooks: - id: terraform_fmt name: terraform-fmt @@ -44,17 +44,17 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.34.0 + rev: v0.35.0 hooks: - id: markdownlint-fix name: fix markdownlint - repo: https://github.com/asottile/pyupgrade - rev: v3.3.2 + rev: v3.7.0 hooks: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/python-poetry/poetry - rev: 1.4.0 + rev: 1.5.0 hooks: - id: poetry-check # - id: poetry-lock diff --git a/pyproject.toml b/pyproject.toml index e9f9d88..4114a34 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -158,6 +158,8 @@ module = [ "google.oauth2", # Azure "azure.*", + # AWS + "boto3", ] ignore_missing_imports = true diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index a2a8d80..3a35d84 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -1,7 +1,7 @@ """AWS Cloud Connector.""" import contextlib from collections.abc import AsyncGenerator, Sequence -from typing import Optional +from typing import Optional, Union from aiobotocore.session import get_session from botocore.exceptions import ClientError @@ -31,7 +31,7 @@ from .credentials import AwsCredentials, get_aws_credentials from .enums import AwsResourceTypes, SeedLabel -from .settings import AwsSpecificSettings +from .settings import AwsAccount, AwsSpecificSettings VALID_RECORD_TYPES = ["A", "CNAME"] IGNORED_TAGS = ["censys-cloud-connector-ignore"] @@ -73,13 +73,13 @@ def __init__(self, settings: Settings): self.ignored_tags: list[str] = [] self.global_ignored_tags: set[str] = set(IGNORED_TAGS) - async def scan( + async def scan( # type: ignore self, provider_setting: AwsSpecificSettings, credentials: AwsCredentials, region: str, ignored_tags: list[str], - ): + ): # type: ignore """Scan AWS. 
Args: @@ -103,21 +103,20 @@ async def scan_all(self): provider_settings: dict[ tuple, AwsSpecificSettings ] = self.settings.providers.get( - self.provider, {} - ) # type: ignore + self.provider, {} # type: ignore + ) for provider_setting in provider_settings.values(): - accounts = provider_setting.accounts - if not accounts: + accounts: list[Union[None, AwsAccount]] + if provider_setting.accounts: + # Scan the default account first, then scan the rest + accounts = [None, *provider_setting.accounts] + else: # If no accounts are configured, scan the default account accounts = [None] - else: - # Scan the default account first, then scan the rest - accounts = [None, *accounts] # Scan each account in the provider for account in accounts: - # Use the account number from the account if it is configured if account is not None: self.account_number = account.account_number diff --git a/src/censys/cloud_connectors/aws_connector/provider_setup.py b/src/censys/cloud_connectors/aws_connector/provider_setup.py index f5878be..a14dfad 100644 --- a/src/censys/cloud_connectors/aws_connector/provider_setup.py +++ b/src/censys/cloud_connectors/aws_connector/provider_setup.py @@ -15,7 +15,6 @@ has_boto = False try: - # note: boto exceptions are dynamically created; there aren't actual classes to import from botocore.exceptions import ClientError @@ -524,7 +523,7 @@ def select_profile(self): Returns: str: Profile name. """ - profile = "" + profile: Optional[str] = None try: choices = self.get_profile_choices() @@ -539,7 +538,7 @@ def select_profile(self): choices, default=os.getenv("AWS_PROFILE"), ) - if type(choice) is dict: + if isinstance(choice, dict): # if there is only 1 choice prompt select one returns a dict, otherwise a string profile = choice["value"] else: @@ -576,5 +575,5 @@ def setup(self): ) answer = answers.get("answer") - if func := choices.get(answer): + if answer and (func := choices.get(answer)): func() diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index 59a3b41..b2a0294 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -51,7 +51,7 @@ def __init__(self, settings: Settings): AzureResourceTypes.STORAGE_ACCOUNTS: self.get_storage_containers, } - async def scan( + async def scan( # type: ignore self, provider_settings: AzureSpecificSettings, credentials: ClientSecretCredential, @@ -82,9 +82,9 @@ async def scan_all(self): """Scan all Azure Subscriptions.""" provider_settings: dict[ tuple, AzureSpecificSettings - ] = self.settings.providers.get( - self.provider, {} - ) # type: ignore + ] = self.settings.providers.get( # type: ignore + self.provider, {} # type: ignore + ) for provider_setting in provider_settings.values(): credentials = ClientSecretCredential( tenant_id=provider_setting.tenant_id, diff --git a/src/censys/cloud_connectors/azure_connector/provider_setup.py b/src/censys/cloud_connectors/azure_connector/provider_setup.py index 0bc442d..81043fa 100644 --- a/src/censys/cloud_connectors/azure_connector/provider_setup.py +++ b/src/censys/cloud_connectors/azure_connector/provider_setup.py @@ -252,5 +252,5 @@ def setup(self): ) get_credentials_from = answers.get("get_credentials_from") - if func := choices.get(get_credentials_from): + if get_credentials_from and (func := choices.get(get_credentials_from)): func() diff --git a/src/censys/cloud_connectors/common/cli/provider_setup.py 
b/src/censys/cloud_connectors/common/cli/provider_setup.py index 309af9d..1316020 100644 --- a/src/censys/cloud_connectors/common/cli/provider_setup.py +++ b/src/censys/cloud_connectors/common/cli/provider_setup.py @@ -156,7 +156,7 @@ def on_success(_: Any) -> None: progress.start_task(task) _task = task - @backoff.on_exception(**default_kwargs) + @backoff.on_exception(**default_kwargs) # type: ignore def _method(): res = method(*args, **kwargs) progress.advance(task) diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index d024858..92be4e3 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -1,7 +1,7 @@ """Gcp Cloud Connector.""" import json from pathlib import Path -from typing import Optional +from typing import Optional, Union from google.api_core import exceptions from google.cloud import securitycenter_v1 @@ -50,7 +50,7 @@ def __init__(self, settings: Settings): GcpSecurityCenterResourceTypes.STORAGE_BUCKET: self.get_storage_buckets, } - async def scan(self, provider_settings: GcpSpecificSettings): + async def scan(self, provider_settings: GcpSpecificSettings): # type: ignore """Scan Gcp. Scans Gcp for assets and seeds. @@ -105,8 +105,8 @@ async def scan_all(self): provider_settings: dict[ tuple, GcpSpecificSettings ] = self.settings.providers.get( - self.provider, {} - ) # type: ignore + self.provider, {} # type: ignore + ) for provider_setting in provider_settings.values(): await self.scan(provider_setting) @@ -360,7 +360,9 @@ async def get_storage_buckets( if (bucket_name := resource_properties.get("id")) and ( project_number := resource_properties.get("projectNumber") ): - scan_data = {"accountNumber": int(project_number)} + scan_data: dict[str, Union[str, int]] = { + "accountNumber": int(project_number) + } if ( project_name := list_assets_result.asset.security_center_properties.resource_project_display_name ): diff --git a/src/censys/cloud_connectors/gcp_connector/provider_setup.py b/src/censys/cloud_connectors/gcp_connector/provider_setup.py index a9c9493..b7dbf05 100644 --- a/src/censys/cloud_connectors/gcp_connector/provider_setup.py +++ b/src/censys/cloud_connectors/gcp_connector/provider_setup.py @@ -795,5 +795,5 @@ def setup(self): ) get_credentials_from = answers.get("get_credentials_from") - if func := choices.get(get_credentials_from): + if get_credentials_from and (func := choices.get(get_credentials_from)): func() diff --git a/tests/test_aws_connector.py b/tests/test_aws_connector.py index 49937b6..b9fd5a0 100644 --- a/tests/test_aws_connector.py +++ b/tests/test_aws_connector.py @@ -1,26 +1,21 @@ import json from typing import Any -from unittest.mock import MagicMock, Mock, call +from unittest.mock import MagicMock, call import asynctest from asynctest import TestCase from botocore.exceptions import ClientError -from parameterized import parameterized from censys.cloud_connectors.aws_connector.connector import AwsCloudConnector from censys.cloud_connectors.aws_connector.credentials import ( AwsCredentials, get_aws_credentials, ) -from censys.cloud_connectors.aws_connector.enums import ( - AwsResourceTypes, - AwsServices, - SeedLabel, -) +from censys.cloud_connectors.aws_connector.enums import AwsResourceTypes, SeedLabel from censys.cloud_connectors.aws_connector.settings import AwsSpecificSettings from censys.cloud_connectors.common.cloud_asset import AwsStorageBucketAsset from censys.cloud_connectors.common.enums 
import ProviderEnum -from censys.cloud_connectors.common.seed import DomainSeed, IpSeed +from censys.cloud_connectors.common.seed import IpSeed from tests.base_connector_case import BaseConnectorCase From eb0eefb9f90de945aeec4b11ce400c848d8297e4 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Mon, 26 Jun 2023 15:14:31 -0400 Subject: [PATCH 08/19] chore: clean up connectors and tests --- .../aws_connector/connector.py | 1 + .../aws_connector/settings.py | 28 ------------------- .../azure_connector/connector.py | 1 + .../gcp_connector/connector.py | 1 + tests/test_aws_connector.py | 2 ++ tests/test_aws_settings.py | 24 ---------------- 6 files changed, 5 insertions(+), 52 deletions(-) diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index 3a35d84..dd0bf29 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -107,6 +107,7 @@ async def scan_all(self): ) for provider_setting in provider_settings.values(): + self.provider_settings = provider_setting accounts: list[Union[None, AwsAccount]] if provider_setting.accounts: # Scan the default account first, then scan the rest diff --git a/src/censys/cloud_connectors/aws_connector/settings.py b/src/censys/cloud_connectors/aws_connector/settings.py index 59fa031..ee87921 100644 --- a/src/censys/cloud_connectors/aws_connector/settings.py +++ b/src/censys/cloud_connectors/aws_connector/settings.py @@ -107,31 +107,3 @@ def from_dict(cls, data: dict): data["accounts"][index] = AwsAccount(**account) return cls(**data) - - # def get_credentials(self): - # """Generator for all configured credentials. Any values within the accounts block will take precedence over the overall values. 
- - # Yields: - # dict[str, Any] - # """ - # yield { - # "account_number": self.account_number, - # "access_key": self.access_key, - # "secret_key": self.secret_key, - # "role_name": self.role_name, - # "role_session_name": self.role_session_name, - # "ignore_tags": self.ignore_tags, - # } - - # if self.accounts: - # for account in self.accounts: - # yield { - # "account_number": (account.account_number or self.account_number), - # "access_key": account.access_key, - # "secret_key": account.secret_key, - # "role_name": (account.role_name or self.role_name), - # "role_session_name": ( - # account.role_session_name or self.role_session_name - # ), - # "ignore_tags": (account.ignore_tags or self.ignore_tags), - # } diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index b2a0294..37dd462 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -86,6 +86,7 @@ async def scan_all(self): self.provider, {} # type: ignore ) for provider_setting in provider_settings.values(): + self.provider_settings = provider_setting credentials = ClientSecretCredential( tenant_id=provider_setting.tenant_id, client_id=provider_setting.client_id, diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index 92be4e3..59b3120 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -108,6 +108,7 @@ async def scan_all(self): self.provider, {} # type: ignore ) for provider_setting in provider_settings.values(): + self.provider_settings = provider_setting await self.scan(provider_setting) def format_label( diff --git a/tests/test_aws_connector.py b/tests/test_aws_connector.py index b9fd5a0..9c026e9 100644 --- a/tests/test_aws_connector.py +++ b/tests/test_aws_connector.py @@ -741,6 +741,8 @@ def test_no_ignored_tag(self): self.connector.ignored_tags = ["non-existent-tag"] assert not self.connector.has_ignored_tag(["tag-name"]) + # TODO: Add ignore tag tests for updated credential logic + def test_extract_tags_from_tagset(self): tag_set = [{"Key": "tag-1"}, {"Key": "tag-2"}] tags = self.connector.extract_tags_from_tagset(tag_set) # type: ignore[arg-type] diff --git a/tests/test_aws_settings.py b/tests/test_aws_settings.py index cc73a72..e2ef6c4 100644 --- a/tests/test_aws_settings.py +++ b/tests/test_aws_settings.py @@ -74,27 +74,3 @@ def test_provider_key(self): expected = (account,) settings = self.aws_settings({"account_number": account}) assert settings.get_provider_key() == expected - - # def test_ignore_tags_provider(self): - # expected = ["test-provider-ignore-tag"] - # settings = self.aws_settings( - # { - # "ignore_tags": expected, - # } - # ) - # creds = next(settings.get_credentials()) - # assert creds["ignore_tags"] == expected - - # def test_ignore_tags_account_overrides_provider(self): - # child = { - # "account_number": "123123123123", - # "ignore_tags": ["test-account-ignore-tag"], - # } - # primary = { - # "ignore_tags": ["test-primary-ignore-tag"], - # "accounts": [child], - # } - # settings = self.aws_settings(primary) - # creds = list(settings.get_credentials()) - # assert creds[0]["ignore_tags"] == primary["ignore_tags"] - # assert creds[1]["ignore_tags"] == child["ignore_tags"] From 8158c8fa9582881cdfc89d958e181463ec0f82e8 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Mon, 26 Jun 2023 15:33:35 -0400 
Subject: [PATCH 09/19] fix(cli): ignore error on no ip address verification --- src/censys/cloud_connectors/azure_connector/provider_setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/censys/cloud_connectors/azure_connector/provider_setup.py b/src/censys/cloud_connectors/azure_connector/provider_setup.py index 81043fa..fe7a003 100644 --- a/src/censys/cloud_connectors/azure_connector/provider_setup.py +++ b/src/censys/cloud_connectors/azure_connector/provider_setup.py @@ -1,4 +1,5 @@ """Azure specific setup CLI.""" +import contextlib from typing import Optional from azure.core.exceptions import ClientAuthenticationError, HttpResponseError @@ -193,7 +194,8 @@ def verify_service_principal(self, provider_setting: AzureSpecificSettings) -> b for subscription_id in provider_setting.subscription_id: network_client = NetworkManagementClient(credential, subscription_id) res = network_client.public_ip_addresses.list_all() - next(res) + with contextlib.suppress(StopIteration): + next(res) return True def setup_with_cli(self) -> None: From bc5c56710b5de99a503a5683bb78747bd6b233e6 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Mon, 26 Jun 2023 16:34:05 -0400 Subject: [PATCH 10/19] fix(azure): improved error messages for Microsoft.Network resource provider --- .../azure_connector/connector.py | 42 ++++++++++++------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index 37dd462..a9f3703 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -10,6 +10,7 @@ from azure.identity.aio import ClientSecretCredential from azure.mgmt.containerinstance.aio import ContainerInstanceManagementClient from azure.mgmt.dns.aio import DnsManagementClient +from azure.mgmt.dns.models import ZoneListResult from azure.mgmt.network.aio import NetworkManagementClient from azure.mgmt.sql.aio import SqlManagementClient from azure.mgmt.storage.aio import StorageManagementClient @@ -98,7 +99,8 @@ async def scan_all(self): await self.scan(provider_setting, credentials, subscription_id) except Exception as e: self.logger.error( - f"Unable to scan Azure Subscription {subscription_id}. Error: {e}" + f"Unable to scan Azure Subscription {subscription_id}." + f" Error: {e}" ) self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) @@ -221,6 +223,27 @@ async def get_sql_servers( await sql_client.close() + async def _list_dns_zones( + self, dns_client: DnsManagementClient + ) -> AsyncGenerator[ZoneListResult, None]: + """List all DNS zones. + + Args: + dns_client (DnsManagementClient): Azure DNS client. + + Yields: + AsyncGenerator[ZoneListResult, None]: DNS zones. + """ + try: + async for zone in dns_client.zones.list(): + yield zone + except HttpResponseError as error: + self.logger.error( + f"Failed to get Azure DNS records: {error.reason} or the subscription" + " does not have access to the Microsoft.Network resource provider." 
+ ) + await dns_client.close() + async def get_dns_records( self, _: AzureSpecificSettings, @@ -237,18 +260,7 @@ async def get_dns_records( """ dns_client = DnsManagementClient(credentials, subscription_id) # type: ignore - try: - # zones = list(dns_client.zones.list()) - zones = dns_client.zones.list() - except HttpResponseError as error: - # TODO: Better error handling here - self.logger.error( - f"Failed to get Azure DNS records: {error.message}", exc_info=True - ) - await dns_client.close() - return - - async for zone in zones: + async for zone in self._list_dns_zones(dns_client): zone_dict = zone.as_dict() # TODO: Do we need to check if zone is public? (ie. do we care?) if zone_dict.get("zone_type") != "Public": # pragma: no cover @@ -305,7 +317,6 @@ async def _list_containers( f"Failed to get Azure containers for {account.name}: {error.message}" ) await blob_service_client.close() - return async def get_storage_container_url( self, blob_service_client: BlobServiceClient, container: ContainerProperties @@ -377,7 +388,8 @@ async def get_storage_containers( self.add_cloud_asset(container_asset, service=current_service) except ServiceRequestError as error: # pragma: no cover self.logger.error( - f"Failed to get Azure container {container} for {account.name}: {error.message}" + f"Failed to get Azure container {container} for {account.name}:" + f" {error.message}" ) await blob_service_client.close() From 3a2139f0af1624498d893c7fbd3e7f6bde5d0c1d Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Mon, 26 Jun 2023 16:44:32 -0400 Subject: [PATCH 11/19] chore: cleanup strings --- .github/workflows/release-ci.yaml | 2 +- .../aws_connector/connector.py | 8 ++-- .../cloud_connectors/aws_connector/enums.py | 5 +- .../aws_connector/provider_setup.py | 38 +++++++++++---- .../azure_connector/provider_setup.py | 11 +++-- .../gcp_connector/connector.py | 11 +++-- .../cloud_connectors/gcp_connector/enums.py | 26 ++++++++-- .../gcp_connector/provider_setup.py | 17 +++++-- tests/base_connector_case.py | 14 +++--- tests/test_gcp_enums.py | 48 +++++++++++++++---- 10 files changed, 136 insertions(+), 44 deletions(-) diff --git a/.github/workflows/release-ci.yaml b/.github/workflows/release-ci.yaml index 3804da2..92b308b 100644 --- a/.github/workflows/release-ci.yaml +++ b/.github/workflows/release-ci.yaml @@ -13,7 +13,7 @@ jobs: with: tag: "latest" version: true - + release: uses: ./.github/workflows/reusable-release-pypi.yaml secrets: inherit diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index dd0bf29..5ae8025 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -161,7 +161,8 @@ async def scan_all(self): ) except Exception as e: self.logger.error( - f"Unable to scan account {self.account_number} in region {region}. Error: {e}" + f"Unable to scan account {self.account_number} in region" + f" {region}. 
Error: {e}" ) self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) @@ -441,7 +442,8 @@ async def describe_network_interfaces( if self.network_interfaces_ignored_tags(network): self.logger.debug( - f"Skipping ignored tag for network interface {network_interface_id}" + "Skipping ignored tag for network interface" + f" {network_interface_id}" ) continue @@ -477,7 +479,7 @@ async def get_resource_tags_paginated( ec2: EC2Client # type: ignore[no-redef] try: - async for page in ec2.get_paginator("describe_tags",).paginate( + async for page in ec2.get_paginator("describe_tags").paginate( Filters=[ { "Name": "resource-type", diff --git a/src/censys/cloud_connectors/aws_connector/enums.py b/src/censys/cloud_connectors/aws_connector/enums.py index 125f53a..35f21be 100644 --- a/src/censys/cloud_connectors/aws_connector/enums.py +++ b/src/censys/cloud_connectors/aws_connector/enums.py @@ -75,7 +75,10 @@ class AwsMessages(str, Enum): "Specify either access_key and secret_key or role_name and role_session_name" ) PROVIDER_SETUP_DOC_LINK = "https://censys-cloud-connector.readthedocs.io/en/stable/aws/provider_setup.html" - TEMPORARY_CREDENTIAL_ERROR = "A temporary credential has been detected which is not supported. Please read our documentation on how to configure AWS IAM." + TEMPORARY_CREDENTIAL_ERROR = ( + "A temporary credential has been detected which is not supported. Please read" + " our documentation on how to configure AWS IAM." + ) def __str__(self) -> str: """Get the string representation of the message. diff --git a/src/censys/cloud_connectors/aws_connector/provider_setup.py b/src/censys/cloud_connectors/aws_connector/provider_setup.py index a14dfad..8845db4 100644 --- a/src/censys/cloud_connectors/aws_connector/provider_setup.py +++ b/src/censys/cloud_connectors/aws_connector/provider_setup.py @@ -47,7 +47,8 @@ def ask_role_session_name(self) -> str: str: Role session name. """ self.print_info( - "AWS recommends setting 'Role Session Name' to the name or identifier that is associated with the user who is using your application." + "AWS recommends setting 'Role Session Name' to the name or identifier that" + " is associated with the user who is using your application." ) answers = self.prompt( { @@ -55,7 +56,9 @@ def ask_role_session_name(self) -> str: "name": "answer", "message": "Enter role session name to use:", "default": AwsDefaults.ROLE_SESSION_NAME.value, - "invalid_message": "Role session name must be between 1 and 64 characters.", + "invalid_message": ( + "Role session name must be between 1 and 64 characters." + ), "validate": lambda name: re.match(r"^[\w+=,.@-]{2,64}$", name), } ) @@ -107,7 +110,10 @@ def ask_stackset(self, exclude_id: str) -> list[dict]: "name": "accounts", "max_height": "70%", "message": "Select accounts(s):", - "instruction": "Use and to scroll, to select, + to select all, to continue.", + "instruction": ( + "Use and to scroll, to select, + to" + " select all, to continue." + ), "choices": accounts, "multiselect": True, "validate": lambda regions: len(regions) > 0, @@ -142,7 +148,10 @@ def ask_list_accounts(self, exclude_id: str): "name": "accounts", "max_height": "70%", "message": "Select accounts(s):", - "instruction": "Use and to scroll, to select, + to select all, to continue.", + "instruction": ( + "Use and to scroll, to select, + to" + " select all, to continue." 
+ ), "choices": accounts, "multiselect": True, "validate": lambda regions: len(regions) > 0, @@ -245,7 +254,10 @@ def ask_role_name(self) -> str: "name": "answer", "message": "Enter an existing IAM Role name to use:", "default": AwsDefaults.ROLE_NAME.value, - "invalid_message": "Role name must be between 1 and 64 characters. Use alphanumeric and '+=,.@-_' characters.", + "invalid_message": ( + "Role name must be between 1 and 64 characters. Use alphanumeric" + " and '+=,.@-_' characters." + ), "validate": self.aws.valid_role_name, } ) @@ -319,7 +331,8 @@ def ask_regions(self) -> list[str]: regions = self.aws.get_regions() except Exception: self.print_error( - "Unable to load regions from AWS. Please check your credentials and try again." + "Unable to load regions from AWS. Please check your credentials and try" + " again." ) exit(1) @@ -329,7 +342,10 @@ def ask_regions(self) -> list[str]: "name": "regions", "max_height": "70%", "message": "Select region(s):", - "instruction": "Fuzzy search enabled. Use and to scroll, to select, + to select all, to continue.", + "instruction": ( + "Fuzzy search enabled. Use and to scroll, to" + " select, + to select all, to continue." + ), "choices": regions, "multiselect": True, "validate": lambda regions: len(regions) > 0, @@ -411,7 +427,10 @@ def ask_load_credentials(self, profile: str) -> bool: { "type": "confirm", "name": "answer", - "message": f"Do you want to run the Cloud Connector using the credentials from profile '{profile}'?", + "message": ( + "Do you want to run the Cloud Connector using the credentials from" + f" profile '{profile}'?" + ), "default": False, } ) @@ -530,7 +549,8 @@ def select_profile(self): if len(choices) == 1: name = choices[0]["name"] self.print_info( - f"There is only one AWS credential profile called '{name}' available." + f"There is only one AWS credential profile called '{name}'" + " available." ) choice = self.prompt_select_one( diff --git a/src/censys/cloud_connectors/azure_connector/provider_setup.py b/src/censys/cloud_connectors/azure_connector/provider_setup.py index fe7a003..58d5c7c 100644 --- a/src/censys/cloud_connectors/azure_connector/provider_setup.py +++ b/src/censys/cloud_connectors/azure_connector/provider_setup.py @@ -62,7 +62,9 @@ def prompt_select_subscriptions( { "type": "confirm", "name": "use_subscription", - "message": f"Confirm subscription {subscriptions[0]['display_name']}:", + "message": ( + f"Confirm subscription {subscriptions[0]['display_name']}:" + ), "default": True, } ] @@ -150,7 +152,9 @@ def create_service_principal(self, subscriptions: list[dict]) -> Optional[dict]: { "type": "confirm", "name": "create_service_principal", - "message": "Confirm creation of service principal with above command:", + "message": ( + "Confirm creation of service principal with above command:" + ), "default": True, } ] @@ -213,7 +217,8 @@ def setup_with_cli(self) -> None: service_principal = self.create_service_principal(selected_subscriptions) if service_principal is None: self.print_error( - "Service principal not created. Please try again or manually create a service principal" + "Service principal not created. 
Please try again or manually create a" + " service principal" ) exit(1) diff --git a/src/censys/cloud_connectors/gcp_connector/connector.py b/src/censys/cloud_connectors/gcp_connector/connector.py index 59b3120..5e51e5e 100644 --- a/src/censys/cloud_connectors/gcp_connector/connector.py +++ b/src/censys/cloud_connectors/gcp_connector/connector.py @@ -96,7 +96,10 @@ async def scan(self, provider_settings: GcpSpecificSettings): # type: ignore ) except Exception as e: self.logger.error( - f"Unable to scan GCP organization {provider_settings.organization_id}. Error: {e}", + ( + "Unable to scan GCP organization" + f" {provider_settings.organization_id}. Error: {e}" + ), ) self.dispatch_event(EventTypeEnum.SCAN_FAILED, exception=e) @@ -180,7 +183,8 @@ async def get_compute_instances( network_interfaces = json.loads(network_interfaces) except json.JSONDecodeError: self.logger.error( - f"Failed to parse network_interfaces for {list_assets_result.asset.name}" + "Failed to parse network_interfaces for" + f" {list_assets_result.asset.name}" ) continue if ( @@ -260,7 +264,8 @@ async def get_container_clusters( private_cluster_config = json.loads(private_cluster_config) except json.decoder.JSONDecodeError: # pragma: no cover self.logger.debug( - f"Failed to parse privateClusterConfig: {private_cluster_config}" + "Failed to parse privateClusterConfig:" + f" {private_cluster_config}" ) continue if ip_address := private_cluster_config.get("publicEndpoint"): diff --git a/src/censys/cloud_connectors/gcp_connector/enums.py b/src/censys/cloud_connectors/gcp_connector/enums.py index 1a68a00..9708339 100644 --- a/src/censys/cloud_connectors/gcp_connector/enums.py +++ b/src/censys/cloud_connectors/gcp_connector/enums.py @@ -16,10 +16,19 @@ class GcloudCommands(str, Enum): DESCRIBE_ORGANIZATION = "organizations describe {organization_id}" GET_PROJECT_ANCESTORS = "projects get-ancestors {project_id}" LIST_SERVICE_ACCOUNTS = "iam service-accounts list" - ADD_ORG_IAM_POLICY = "organizations add-iam-policy-binding {organization_id} --member '{member}' --role '{role}'" - CREATE_SERVICE_ACCOUNT = "iam service-accounts create {name} --display-name '{display_name}' --description '{description}'" + ADD_ORG_IAM_POLICY = ( + "organizations add-iam-policy-binding {organization_id} --member '{member}'" + " --role '{role}'" + ) + CREATE_SERVICE_ACCOUNT = ( + "iam service-accounts create {name} --display-name '{display_name}'" + " --description '{description}'" + ) ENABLE_SERVICE_ACCOUNT = "iam service-accounts enable {service_account_email}" - CREATE_SERVICE_ACCOUNT_KEY = "iam service-accounts keys create {key_file} --iam-account {service_account_email}" + CREATE_SERVICE_ACCOUNT_KEY = ( + "iam service-accounts keys create {key_file} --iam-account" + " {service_account_email}" + ) def __str__(self) -> str: """Return the string representation of the command. @@ -133,11 +142,18 @@ def filter(self) -> str: class GcpMessages(str, Enum): """GCP messages.""" - INSTALL_GCLOUD_INSTRUCTIONS = "Please install the [link=https://cloud.google.com/sdk/docs/downloads-interactive]gcloud SDK[/link] before continuing." + INSTALL_GCLOUD_INSTRUCTIONS = ( + "Please install the" + " [link=https://cloud.google.com/sdk/docs/downloads-interactive]gcloud" + " SDK[/link] before continuing." + ) LOGIN_INSTRUCTIONS = ( f"Please login to your GCP account with the command: `{GcloudCommands.LOGIN}`." ) - LOGIN_TRY_AGAIN = "Please login and try again. Or run the above commands in the Google Cloud Console." + LOGIN_TRY_AGAIN = ( + "Please login and try again. 
Or run the above commands in the Google Cloud" + " Console." + ) SAVING_KEY = "Saving key file into ./secrets/" ERROR_UNABLE_TO_GET_ACCOUNTS = "Unable to get list of authenticated GCP Accounts." diff --git a/src/censys/cloud_connectors/gcp_connector/provider_setup.py b/src/censys/cloud_connectors/gcp_connector/provider_setup.py index b7dbf05..99c7ea2 100644 --- a/src/censys/cloud_connectors/gcp_connector/provider_setup.py +++ b/src/censys/cloud_connectors/gcp_connector/provider_setup.py @@ -128,7 +128,8 @@ def get_default_project_id_from_cli(self) -> Optional[str]: res = self.run_command(GcloudCommands.GET_CONFIG_VALUE.generate(key="project")) if res.returncode != 0: self.print_info( - "If you are unsure of the project id, go to https://console.cloud.google.com/iam-admin/settings." + "If you are unsure of the project id, go to" + " https://console.cloud.google.com/iam-admin/settings." ) return None return res.stdout.strip() @@ -308,7 +309,8 @@ def generate_role_binding_command( """ commands = [ # Adds a comment about scope - "# Grants the service account the required roles from the organization level" + "# Grants the service account the required roles from the organization" + " level" ] for role in roles: commands.append( @@ -551,7 +553,9 @@ def prompt_to_create_service_account( "message": "Confirm or name service account:", "default": "censys-cloud-connector", "validate": validate_service_account_name, - "invalid_message": "Service account name must be between 6 and 30 characters.", + "invalid_message": ( + "Service account name must be between 6 and 30 characters." + ), } ] ) @@ -608,7 +612,12 @@ def verify_service_account_permissions( def setup_with_cli(self) -> None: """Setup with gcloud CLI.""" self.print_info( - "Before you begin you'll need to have identified the following:\n [info]-[/info] The Google Cloud organization administrator account which will execute scripts that configure the Censys Cloud Connector.\n [info]-[/info] The project that will be used to run the Censys Cloud Connector. Please note that the cloud connector will be scoped to the organization." + "Before you begin you'll need to have identified the following:\n " + " [info]-[/info] The Google Cloud organization administrator account which" + " will execute scripts that configure the Censys Cloud Connector.\n " + " [info]-[/info] The project that will be used to run the Censys Cloud" + " Connector. Please note that the cloud connector will be scoped to the" + " organization." 
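The formatting hunks in this patch mostly re-wrap long user-facing messages into parenthesized, adjacent string literals so each source line fits the line-length limit. Python's parser concatenates adjacent literals at compile time, so the wrapped form evaluates to exactly the same string as the original single-line version. A minimal standalone illustration, reusing one of the messages from the hunks above (the variable names are illustrative only, not part of the patch):

wrapped = (
    "Unable to load regions from AWS. Please check your credentials and try"
    " again."
)
original = (
    "Unable to load regions from AWS. Please check your credentials and try again."
)
assert wrapped == original  # identical strings; only the source layout changed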
) if not self.is_gcloud_installed(): self.print_warning(GcpMessages.INSTALL_GCLOUD_INSTRUCTIONS) diff --git a/tests/base_connector_case.py b/tests/base_connector_case.py index 1aa920c..c766831 100644 --- a/tests/base_connector_case.py +++ b/tests/base_connector_case.py @@ -68,9 +68,10 @@ def assert_healthcheck_called(self, healthcheck: MagicMock, count: int = 1): assert ( healthcheck.call_count == count ), f"Expected {count} calls, got {healthcheck.call_count}" - assert ( - healthcheck.return_value.__enter__.call_count == count - ), f"Expected {count} calls, got {healthcheck.return_value.__enter__.call_count}" + assert healthcheck.return_value.__enter__.call_count == count, ( + f"Expected {count} calls, got" + f" {healthcheck.return_value.__enter__.call_count}" + ) assert ( healthcheck.return_value.__exit__.call_count == count ), f"Expected {count} calls, got {healthcheck.return_value.__exit__.call_count}" @@ -97,9 +98,10 @@ def test_init(self) -> None: assert ( self.connector.seeds_api._api_key == self.default_settings["censys_api_key"] ) - assert ( - self.connector._add_cloud_asset_path == add_cloud_asset_path - ), f"Expected {add_cloud_asset_path}, got {self.connector._add_cloud_asset_path}" + assert self.connector._add_cloud_asset_path == add_cloud_asset_path, ( + f"Expected {add_cloud_asset_path}, got" + f" {self.connector._add_cloud_asset_path}" + ) # Assert that the connector has no seeds and cloud_assets assert list(self.connector.seeds.keys()) == [] diff --git a/tests/test_gcp_enums.py b/tests/test_gcp_enums.py index 22675f6..0d2bda0 100644 --- a/tests/test_gcp_enums.py +++ b/tests/test_gcp_enums.py @@ -74,7 +74,10 @@ class TestEnums(TestCase): ), ( GcloudCommands.ADD_ORG_IAM_POLICY, - "gcloud organizations add-iam-policy-binding my-org --member 'user:my-user' --role 'roles/viewer'", + ( + "gcloud organizations add-iam-policy-binding my-org --member" + " 'user:my-user' --role 'roles/viewer'" + ), { "organization_id": "my-org", "member": "user:my-user", @@ -83,7 +86,10 @@ class TestEnums(TestCase): ), ( GcloudCommands.ADD_ORG_IAM_POLICY, - "gcloud organizations add-iam-policy-binding my-org --member 'user:my-user' --role 'roles/viewer' --quiet", + ( + "gcloud organizations add-iam-policy-binding my-org --member" + " 'user:my-user' --role 'roles/viewer' --quiet" + ), { "organization_id": "my-org", "member": "user:my-user", @@ -93,7 +99,11 @@ class TestEnums(TestCase): ), ( GcloudCommands.CREATE_SERVICE_ACCOUNT, - "gcloud iam service-accounts create my-service-account --display-name 'My Service Account' --description 'My Service Account Description'", + ( + "gcloud iam service-accounts create my-service-account" + " --display-name 'My Service Account' --description 'My Service" + " Account Description'" + ), { "name": TEST_SERVICE_ACCOUNT, "display_name": "My Service Account", @@ -102,7 +112,11 @@ class TestEnums(TestCase): ), ( GcloudCommands.CREATE_SERVICE_ACCOUNT, - "gcloud iam service-accounts create my-service-account --display-name 'My Service Account' --description 'My Service Account Description' --project my-project", + ( + "gcloud iam service-accounts create my-service-account" + " --display-name 'My Service Account' --description 'My Service" + " Account Description' --project my-project" + ), { "name": TEST_SERVICE_ACCOUNT, "display_name": "My Service Account", @@ -112,12 +126,19 @@ class TestEnums(TestCase): ), ( GcloudCommands.ENABLE_SERVICE_ACCOUNT, - "gcloud iam service-accounts enable my-service-account@my-project.iam.gserviceaccount.com", + ( + "gcloud iam 
service-accounts enable" + " my-service-account@my-project.iam.gserviceaccount.com" + ), {"service_account_email": TEST_SERVICE_ACCOUNT_EMAIL}, ), ( GcloudCommands.CREATE_SERVICE_ACCOUNT_KEY, - "gcloud iam service-accounts keys create my-service-account.json --iam-account my-service-account@my-project.iam.gserviceaccount.com", + ( + "gcloud iam service-accounts keys create my-service-account.json" + " --iam-account" + " my-service-account@my-project.iam.gserviceaccount.com" + ), { "key_file": "my-service-account.json", "service_account_email": TEST_SERVICE_ACCOUNT_EMAIL, @@ -172,7 +193,10 @@ def test_gcp_api_ids_urls( ), ( GcpApiIds.SECURITYCENTER, - "gcloud services enable securitycenter.googleapis.com --project my-project", + ( + "gcloud services enable securitycenter.googleapis.com --project" + " my-project" + ), ), ] ) @@ -238,11 +262,17 @@ def test_gcp_roles( ), ( GcpSecurityCenterResourceTypes.DNS_ZONE, - 'securityCenterProperties.resource_type : "google.cloud.dns.ManagedZone"', + ( + "securityCenterProperties.resource_type :" + ' "google.cloud.dns.ManagedZone"' + ), ), ( GcpSecurityCenterResourceTypes.STORAGE_BUCKET, - 'securityCenterProperties.resource_type : "google.cloud.storage.Bucket"', + ( + "securityCenterProperties.resource_type :" + ' "google.cloud.storage.Bucket"' + ), ), ] ) From 64af14375ee6d4fb46464e36b677f4f8d52602ff Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 28 Jun 2023 10:15:37 -0400 Subject: [PATCH 12/19] chore: add compare workspaces script --- .flake8 | 3 +- poetry.lock | 321 +--------------------------------- pyproject.toml | 4 +- scripts/compare_workspaces.py | 144 +++++++++++++++ 4 files changed, 158 insertions(+), 314 deletions(-) create mode 100644 scripts/compare_workspaces.py diff --git a/.flake8 b/.flake8 index 6875069..53cc141 100644 --- a/.flake8 +++ b/.flake8 @@ -12,7 +12,8 @@ exclude = dist ignore = E203,E501,W503 per-file-ignores = - tests/*.py:D100,D101,D102,D103,D104,D107,DAR101 + tests/*.py:D100,D101,D102,D103,D104,D107,DAR101, + scripts/*.py:D100, # flake8-docstrings docstring-convention = google # darglint diff --git a/poetry.lock b/poetry.lock index e19255e..0dfb6d1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "adal" version = "1.2.7" description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -22,7 +21,6 @@ requests = ">=2.0.0,<3" name = "aioboto3" version = "11.2.0" description = "Async boto3 wrapper" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -41,7 +39,6 @@ s3cse = ["cryptography (>=2.3.1)"] name = "aiobotocore" version = "2.5.0" description = "Async client for aws services using botocore and aiohttp" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -64,7 +61,6 @@ boto3 = ["boto3 (>=1.26.76,<1.26.77)"] name = "aiohttp" version = "3.8.4" description = "Async http client/server framework (asyncio)" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -173,7 +169,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aioitertools" version = "0.11.0" description = "itertools and builtins for AsyncIO and mixed iterables" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -188,7 +183,6 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiometer" version = "0.4.0" description = "A Python concurrency scheduling library, compatible with asyncio and trio" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -203,7 +197,6 @@ anyio = ">=3.2,<4.0" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -218,7 +211,6 @@ frozenlist = ">=1.1.0" name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -230,7 +222,6 @@ files = [ name = "antlr4-python3-runtime" version = "4.9.3" description = "ANTLR 4.9.3 runtime for Python 3.7" -category = "dev" optional = false python-versions = "*" files = [ @@ -241,7 +232,6 @@ files = [ name = "anyio" version = "3.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -263,7 +253,6 @@ trio = ["trio (<0.22)"] name = "applicationinsights" version = "0.11.10" description = "This project extends the Application Insights API surface to support Python." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -275,7 +264,6 @@ files = [ name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -287,7 +275,6 @@ files = [ name = "argcomplete" version = "2.1.2" description = "Bash tab completion for argparse" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -303,7 +290,6 @@ test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] name = "astor" version = "0.8.1" description = "Read/rewrite/write Python ASTs" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ @@ -315,7 +301,6 @@ files = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -333,7 +318,6 @@ test = ["astroid", "pytest"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -345,7 +329,6 @@ files = [ name = "asynctest" version = "0.13.0" description = "Enhance the standard unittest package with features for testing asyncio libraries" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -357,7 +340,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -376,7 +358,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "azure-appconfiguration" version = "1.1.1" description = "Microsoft App Configuration Data Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -395,7 +376,6 @@ async = ["aiodns (>=2.0)", "aiohttp (>=3.0)"] name = "azure-batch" version = "13.0.0" description = "Microsoft Azure Batch Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -411,7 +391,6 @@ msrestazure = ">=0.4.32,<2.0.0" name = "azure-cli" version = "2.48.1" description = "Microsoft Azure Command-Line Tools" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -524,7 +503,6 @@ xmltodict = ">=0.12,<1.0" name = "azure-cli-core" version = "2.48.1" description = "Microsoft Azure Command-Line Tools Core Module" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -534,7 +512,7 @@ files = [ [package.dependencies] argcomplete = ">=2.0,<3.0" -azure-cli-telemetry = ">=1.0.0,<1.1.0" +azure-cli-telemetry = "==1.0.8.*" azure-mgmt-core = ">=1.2.0,<2" cryptography = "*" distro = {version = "*", markers = "sys_platform == \"linux\""} @@ -556,7 +534,6 @@ requests = {version = "*", extras = ["socks"]} name = "azure-cli-telemetry" version = "1.0.8" description = "Microsoft Azure CLI Telemetry Package" -category = "dev" optional = false python-versions = "*" files = [ @@ -572,7 +549,6 @@ portalocker = ">=1.6,<3" name = "azure-common" version = "1.1.28" description = "Microsoft Azure Client Library for Python (Common)" -category = "dev" optional = false python-versions = "*" files = [ @@ -584,7 +560,6 @@ files = [ name = "azure-core" version = "1.26.4" description = "Microsoft Azure Core Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -604,7 +579,6 @@ aio = ["aiohttp (>=3.0)"] name = "azure-cosmos" version = "3.2.0" description = "Azure Cosmos Python SDK" -category 
= "dev" optional = false python-versions = "*" files = [ @@ -620,7 +594,6 @@ six = ">=1.6" name = "azure-data-tables" version = "12.4.0" description = "Microsoft Azure Azure Data Tables Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -636,7 +609,6 @@ msrest = ">=0.6.21" name = "azure-datalake-store" version = "0.0.52" description = "Azure Data Lake Store Filesystem Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -653,7 +625,6 @@ requests = ">=2.20.0" name = "azure-graphrbac" version = "0.60.0" description = "Microsoft Azure Graph RBAC Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -670,7 +641,6 @@ msrestazure = ">=0.4.32,<2.0.0" name = "azure-identity" version = "1.12.0" description = "Microsoft Azure Identity Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -689,7 +659,6 @@ six = ">=1.12.0" name = "azure-keyvault" version = "1.1.0" description = "Microsoft Azure Key Vault Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -709,7 +678,6 @@ requests = ">=2.18.4" name = "azure-keyvault-administration" version = "4.0.0b3" description = "Microsoft Azure Key Vault Administration Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -726,7 +694,6 @@ msrest = ">=0.6.21" name = "azure-keyvault-keys" version = "4.8.0b2" description = "Microsoft Azure Key Vault Keys Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -745,7 +712,6 @@ typing-extensions = ">=4.0.1" name = "azure-loganalytics" version = "0.1.1" description = "Microsoft Azure Log Analytics Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -761,7 +727,6 @@ msrest = ">=0.5.0" name = "azure-mgmt-advisor" version = "9.0.0" description = "Microsoft Azure Advisor Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -778,7 +743,6 @@ msrest = ">=0.5.0" name = "azure-mgmt-apimanagement" version = "3.0.0" description = "Microsoft Azure API Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -795,7 +759,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-appconfiguration" version = "3.0.0" description = "Microsoft Azure App Configuration Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -812,7 +775,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-appcontainers" version = "2.0.0" description = "Microsoft Azure Appcontainers Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -829,7 +791,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-applicationinsights" version = "1.0.0" description = "Microsoft Azure Application Insights Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -846,7 +807,6 @@ msrest = ">=0.5.0" name = "azure-mgmt-authorization" version = "3.0.0" description = "Microsoft Azure Authorization Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -863,7 +823,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-batch" version = "17.0.0" description = "Microsoft Azure Batch Management Client Library for Python" -category = "dev" optional = false 
python-versions = ">=3.7" files = [ @@ -880,7 +839,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-batchai" version = "7.0.0b1" description = "Microsoft Azure Batchai Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -897,7 +855,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-billing" version = "6.0.0" description = "Microsoft Azure Billing Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -914,7 +871,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-botservice" version = "2.0.0" description = "Microsoft Azure Bot Service Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -931,7 +887,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-cdn" version = "12.0.0" description = "Microsoft Azure CDN Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -948,7 +903,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-cognitiveservices" version = "13.3.0" description = "Microsoft Azure Cognitive Services Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -965,7 +919,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-compute" version = "29.1.0" description = "Microsoft Azure Compute Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -982,7 +935,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-consumption" version = "2.0.0" description = "Microsoft Azure Consumption Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -999,7 +951,6 @@ msrestazure = ">=0.4.20,<2.0.0" name = "azure-mgmt-containerinstance" version = "10.1.0b1" description = "Microsoft Azure Container Instance Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1016,7 +967,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-containerregistry" version = "10.1.0" description = "Microsoft Azure Container Registry Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1033,7 +983,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-containerservice" version = "22.0.0" description = "Microsoft Azure Container Service Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1050,7 +999,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-core" version = "1.4.0" description = "Microsoft Azure Management Core Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1065,7 +1013,6 @@ azure-core = ">=1.26.2,<2.0.0" name = "azure-mgmt-cosmosdb" version = "9.0.0" description = "Microsoft Azure Cosmos DB Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1082,7 +1029,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-databoxedge" version = "1.0.0" description = "Microsoft Azure Databoxedge Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1099,7 +1045,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-datalake-analytics" version = "0.2.1" description = "Microsoft Azure Data Lake Analytics Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1116,7 +1061,6 @@ msrestazure = ">=0.4.7,<2.0.0" name = "azure-mgmt-datalake-nspkg" version = "3.0.1" description = "Microsoft Azure Data Lake 
Management Namespace Package [Internal]" -category = "dev" optional = false python-versions = "*" files = [ @@ -1132,7 +1076,6 @@ azure-mgmt-nspkg = ">=3.0.0" name = "azure-mgmt-datalake-store" version = "0.5.0" description = "Microsoft Azure Data Lake Store Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1149,7 +1092,6 @@ msrestazure = ">=0.4.27,<2.0.0" name = "azure-mgmt-datamigration" version = "10.0.0" description = "Microsoft Azure Data Migration Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1166,7 +1108,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-devtestlabs" version = "4.0.0" description = "Microsoft Azure DevTestLabs Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1183,7 +1124,6 @@ msrestazure = ">=0.4.32,<2.0.0" name = "azure-mgmt-dns" version = "8.0.0" description = "Microsoft Azure DNS Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1200,7 +1140,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-eventgrid" version = "10.2.0b2" description = "Microsoft Azure Event Grid Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1217,7 +1156,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-eventhub" version = "10.1.0" description = "Microsoft Azure Event Hub Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1234,7 +1172,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-extendedlocation" version = "1.0.0b2" description = "Microsoft Azure Extendedlocation Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1251,7 +1188,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-hdinsight" version = "9.0.0" description = "Microsoft Azure HDInsight Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1268,7 +1204,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-imagebuilder" version = "1.1.0" description = "Microsoft Azure Image Builder Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1285,7 +1220,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-iotcentral" version = "10.0.0b2" description = "Microsoft Azure Iot Central Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1302,7 +1236,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-iothub" version = "2.3.0" description = "Microsoft Azure IoT Hub Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1319,7 +1252,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-iothubprovisioningservices" version = "1.1.0" description = "Microsoft Azure IoT Hub Provisioning Services Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1336,7 +1268,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-keyvault" version = "10.2.0" description = "Microsoft Azure Key Vault Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1353,7 +1284,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-kusto" version = "0.3.0" description = "Microsoft Azure Kusto Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1370,7 +1300,6 @@ msrestazure = 
">=0.4.32,<2.0.0" name = "azure-mgmt-loganalytics" version = "13.0.0b4" description = "Microsoft Azure Log Analytics Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1387,7 +1316,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-managedservices" version = "1.0.0" description = "Microsoft Azure Managed Services Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1404,7 +1332,6 @@ msrestazure = ">=0.4.32,<2.0.0" name = "azure-mgmt-managementgroups" version = "1.0.0" description = "Microsoft Azure Management Groups Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1421,7 +1348,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-maps" version = "2.0.0" description = "Microsoft Azure Maps Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1438,7 +1364,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-marketplaceordering" version = "1.1.0" description = "Microsoft Azure Market Place Ordering Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1455,7 +1380,6 @@ msrest = ">=0.5.0" name = "azure-mgmt-media" version = "9.0.0" description = "Microsoft Azure Media Services Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1472,7 +1396,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-monitor" version = "5.0.1" description = "Microsoft Azure Monitor Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1489,7 +1412,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-msi" version = "7.0.0" description = "Microsoft Azure Msi Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1506,7 +1428,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-netapp" version = "9.0.1" description = "Microsoft Azure NetApp Files Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1523,7 +1444,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-network" version = "23.0.1" description = "Microsoft Azure Network Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1540,7 +1460,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-nspkg" version = "3.0.2" description = "Microsoft Azure Resource Management Namespace Package [Internal]" -category = "dev" optional = false python-versions = "*" files = [ @@ -1556,7 +1475,6 @@ azure-nspkg = ">=3.0.0" name = "azure-mgmt-policyinsights" version = "1.1.0b4" description = "Microsoft Azure Policy Insights Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1573,7 +1491,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-privatedns" version = "1.0.0" description = "Microsoft Azure DNS Private Zones Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1590,7 +1507,6 @@ msrest = ">=0.5.0" name = "azure-mgmt-rdbms" version = "10.2.0b7" description = "Microsoft Azure RDBMS Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1607,7 +1523,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-recoveryservices" version = "2.2.0" description = "Microsoft Azure Recovery Services Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1624,7 
+1539,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-recoveryservicesbackup" version = "5.1.0" description = "Microsoft Azure Recovery Services Backup Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1641,7 +1555,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-redhatopenshift" version = "1.2.0" description = "Microsoft Azure Red Hat Openshift Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1658,7 +1571,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-redis" version = "14.1.0" description = "Microsoft Azure Redis Cache Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1675,7 +1587,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-relay" version = "0.1.0" description = "Microsoft Azure Relay Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1692,7 +1603,6 @@ msrestazure = ">=0.4.20,<2.0.0" name = "azure-mgmt-resource" version = "22.0.0" description = "Microsoft Azure Resource Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1709,7 +1619,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-search" version = "9.0.0" description = "Microsoft Azure Search Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1726,7 +1635,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-security" version = "3.0.0" description = "Microsoft Azure Security Center Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1743,7 +1651,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-servicebus" version = "8.2.0" description = "Microsoft Azure Service Bus Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1760,7 +1667,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-servicefabric" version = "1.0.0" description = "Microsoft Azure Service Fabric Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1777,7 +1683,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-servicefabricmanagedclusters" version = "1.0.0" description = "Microsoft Azure Servicefabricmanagedclusters Management Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1794,7 +1699,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-servicelinker" version = "1.2.0b1" description = "Microsoft Azure Service Linker Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1811,7 +1715,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-signalr" version = "1.1.0" description = "Microsoft Azure SignalR Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1828,7 +1731,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-sql" version = "4.0.0b10" description = "Microsoft Azure SQL Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1845,7 +1747,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-sqlvirtualmachine" version = "1.0.0b5" description = "Microsoft Azure Sql Virtual Machine Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1862,7 +1763,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-storage" version = "21.0.0" description = "Microsoft Azure Storage 
Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1879,7 +1779,6 @@ msrest = ">=0.7.1" name = "azure-mgmt-synapse" version = "2.1.0b5" description = "Microsoft Azure Synapse Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1896,7 +1795,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-trafficmanager" version = "1.0.0" description = "Microsoft Azure Traffic Manager Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1913,7 +1811,6 @@ msrest = ">=0.6.21" name = "azure-mgmt-web" version = "7.0.0" description = "Microsoft Azure Web Apps Management Client Library for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1930,7 +1827,6 @@ msrest = ">=0.6.21" name = "azure-multiapi-storage" version = "1.1.0" description = "Microsoft Azure Storage Client Library for Python with multi API version support." -category = "dev" optional = false python-versions = "*" files = [ @@ -1950,7 +1846,6 @@ requests = "*" name = "azure-nspkg" version = "3.0.2" description = "Microsoft Azure Namespace Package [Internal]" -category = "dev" optional = false python-versions = "*" files = [ @@ -1963,7 +1858,6 @@ files = [ name = "azure-storage-blob" version = "12.16.0" description = "Microsoft Azure Blob Storage Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1984,7 +1878,6 @@ aio = ["azure-core[aio] (>=1.26.0,<2.0.0)"] name = "azure-storage-common" version = "1.4.2" description = "Microsoft Azure Storage Common Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -2002,7 +1895,6 @@ requests = "*" name = "azure-synapse-accesscontrol" version = "0.5.0" description = "Microsoft Azure Synapse AccessControl Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -2019,7 +1911,6 @@ msrest = ">=0.5.0" name = "azure-synapse-artifacts" version = "0.15.0" description = "Microsoft Synapse Artifacts Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2035,7 +1926,6 @@ msrest = ">=0.7.1" name = "azure-synapse-managedprivateendpoints" version = "0.4.0" description = "Microsoft Azure Synapse Managed Private Endpoints Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -2052,7 +1942,6 @@ msrest = ">=0.5.0" name = "azure-synapse-spark" version = "0.2.0" description = "Microsoft Azure Synapse Spark Client Library for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -2069,7 +1958,6 @@ msrest = ">=0.5.0" name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2081,7 +1969,6 @@ files = [ name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -2093,7 +1980,6 @@ files = [ name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2105,7 +1991,6 @@ files = [ name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ 
-2140,7 +2025,6 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -2159,7 +2043,6 @@ lxml = ["lxml"] name = "black" version = "22.12.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2195,7 +2078,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blacken-docs" version = "1.13.0" description = "Run Black on Python code blocks in documentation files." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2210,7 +2092,6 @@ black = ">=22.1.0" name = "boto3" version = "1.26.76" description = "The AWS SDK for Python" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2230,7 +2111,6 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.29.76" description = "Low-level, data-driven core of boto 3." -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2250,7 +2130,6 @@ crt = ["awscrt (==0.16.9)"] name = "botocore-stubs" version = "1.29.130" description = "Type annotations and code completion for botocore" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2265,7 +2144,6 @@ types-awscrt = "*" name = "cachetools" version = "5.3.0" description = "Extensible memoizing collections and decorators" -category = "dev" optional = false python-versions = "~=3.7" files = [ @@ -2275,14 +2153,13 @@ files = [ [[package]] name = "censys" -version = "2.2.2" +version = "2.2.3" description = "An easy-to-use and lightweight API wrapper for Censys APIs (censys.io)." -category = "main" optional = false python-versions = ">=3.7.2,<4.0.0" files = [ - {file = "censys-2.2.2-py3-none-any.whl", hash = "sha256:c735db4d384070b141aaa1a958e6935be640b7bf2cadf2ae6d769b2a9273a3b5"}, - {file = "censys-2.2.2.tar.gz", hash = "sha256:3099c91db4a97018a000265a2279a8e413bf40664f0cdd6c6eee1b550d4bf9d5"}, + {file = "censys-2.2.3-py3-none-any.whl", hash = "sha256:a32f41f0d367ffc63f98c147a1939653876fb4530bb82180d62af218ae390b2e"}, + {file = "censys-2.2.3.tar.gz", hash = "sha256:b92a864cb1d2cac8e6c50ffda98629372defd098e1d54c081c8a9ccb8c1050d4"}, ] [package.dependencies] @@ -2290,13 +2167,12 @@ argcomplete = ">=2.0.0,<4.0.0" backoff = ">=2.0.0,<3.0.0" requests = ">=2.29.0" rich = ">=10.16.2" -urllib3 = "<2.0.0" +urllib3 = "<3.0.0" [[package]] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2308,7 +2184,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -2385,7 +2260,6 @@ pycparser = "*" name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -2397,7 +2271,6 @@ files = [ name = "chardet" version = "3.0.4" description = "Universal encoding detector for Python 2 and 3" -category = "dev" optional = false python-versions = "*" files = [ @@ -2409,7 +2282,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2494,7 +2366,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2509,7 +2380,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -2521,7 +2391,6 @@ files = [ name = "coverage" version = "7.2.5" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2588,7 +2457,6 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2630,7 +2498,6 @@ tox = ["tox"] name = "darglint" version = "1.8.1" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." -category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2642,7 +2509,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2654,7 +2520,6 @@ files = [ name = "deprecated" version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2672,7 +2537,6 @@ dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version name = "distlib" version = "0.3.6" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -2684,7 +2548,6 @@ files = [ name = "distro" version = "1.8.0" description = "Distro - an OS platform information API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2696,7 +2559,6 @@ files = [ name = "dnspython" version = "2.3.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2717,7 +2579,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2729,7 +2590,6 @@ files = [ name = "email-validator" version = "2.0.0.post2" description = "A robust email address syntax and deliverability validation library." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2745,7 +2605,6 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2760,7 +2619,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -2775,7 +2633,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "fabric" version = "2.7.1" description = "High level SSH command execution" -category = "dev" optional = false python-versions = "*" files = [ @@ -2796,7 +2653,6 @@ testing = ["mock (>=2.0.0,<3.0)"] name = "filelock" version = "3.12.0" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2812,7 +2668,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p name = "flake8" version = "4.0.1" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2829,7 +2684,6 @@ pyflakes = ">=2.4.0,<2.5.0" name = "flake8-black" version = "0.3.6" description = "flake8 plugin to call black as a code style validator" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2849,7 +2703,6 @@ develop = ["build", "twine"] name = "flake8-bugbear" version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2868,7 +2721,6 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] name = "flake8-comprehensions" version = "3.12.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2883,7 +2735,6 @@ flake8 = ">=3.0,<3.2.0 || >3.2.0" name = "flake8-docstrings" version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2899,7 +2750,6 @@ pydocstyle = ">=2.1" name = "flake8-isort" version = "4.2.0" description = "flake8 plugin that integrates isort ." -category = "dev" optional = false python-versions = "*" files = [ @@ -2918,7 +2768,6 @@ test = ["pytest-cov"] name = "flake8-plugin-utils" version = "1.3.2" description = "The package provides base classes and utils for flake8 plugin writing" -category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2930,7 +2779,6 @@ files = [ name = "flake8-polyfill" version = "1.0.2" description = "Polyfill package for Flake8 plugins" -category = "dev" optional = false python-versions = "*" files = [ @@ -2945,7 +2793,6 @@ flake8 = "*" name = "flake8-pytest-style" version = "1.7.2" description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." 
-category = "dev" optional = false python-versions = ">=3.7.2,<4.0.0" files = [ @@ -2960,7 +2807,6 @@ flake8-plugin-utils = ">=1.3.2,<2.0.0" name = "flake8-simplify" version = "0.18.2" description = "flake8 plugin which checks for code that can be simplified" -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -2976,7 +2822,6 @@ flake8 = ">=3.7" name = "frozenlist" version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3060,7 +2905,6 @@ files = [ name = "furo" version = "2022.12.7" description = "A clean customisable Sphinx documentation theme." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3078,7 +2922,6 @@ sphinx-basic-ng = "*" name = "google-api-core" version = "2.11.0" description = "Google API client core library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3109,7 +2952,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] name = "google-auth" version = "2.17.3" description = "Google Authentication Library" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ @@ -3134,7 +2976,6 @@ requests = ["requests (>=2.20.0,<3.0.0dev)"] name = "google-cloud-resource-manager" version = "1.10.0" description = "Google Cloud Resource Manager API client library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3143,7 +2984,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = [ {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, @@ -3155,7 +2996,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-cloud-securitycenter" version = "1.21.0" description = "Google Cloud Securitycenter API client library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3164,7 +3004,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = [ {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, @@ -3176,7 +3016,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "googleapis-common-protos" version = "1.59.0" description = "Common protobufs used in Google APIs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3195,7 +3034,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] name = "grpc-google-iam-v1" version = "0.12.6" description = "IAM API client library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3212,7 +3050,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.54.0" description = "HTTP/2-based RPC framework" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3270,7 +3107,6 @@ protobuf = ["grpcio-tools (>=1.54.0)"] name = "grpcio-status" version = "1.54.0" description = "Status proto mapping for gRPC" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3287,7 +3123,6 @@ protobuf = 
">=4.21.6" name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3302,7 +3137,6 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "identify" version = "2.5.24" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3317,7 +3151,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3329,7 +3162,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3341,7 +3173,6 @@ files = [ name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3361,7 +3192,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3373,7 +3203,6 @@ files = [ name = "inquirerpy" version = "0.3.4" description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3392,7 +3221,6 @@ docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst name = "invoke" version = "1.7.3" description = "Pythonic task execution" -category = "dev" optional = false python-versions = "*" files = [ @@ -3404,7 +3232,6 @@ files = [ name = "ipython" version = "8.13.2" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -3444,7 +3271,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "dev" optional = false python-versions = "*" files = [ @@ -3459,7 +3285,6 @@ six = "*" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -3477,7 +3302,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "javaproperties" version = "0.5.2" description = "Read & write Java .properties files" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" files = [ @@ -3492,7 +3316,6 @@ six = ">=1.4,<2.0" name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3512,7 +3335,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3530,7 +3352,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3542,7 +3363,6 @@ files = [ name = "jsondiff" version = "2.0.0" description = "Diff JSON and JSON-like structures in Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -3554,7 +3374,6 @@ files = [ name = "knack" version = "0.10.1" description = "A Command-Line Interface framework" -category = "dev" optional = false python-versions = "*" files = [ @@ -3573,7 +3392,6 @@ tabulate = "*" name = "livereload" version = "2.6.3" description = "Python LiveReload is an awesome tool for web developers" -category = "dev" optional = false python-versions = "*" files = [ @@ -3589,7 +3407,6 @@ tornado = {version = "*", markers = "python_version > \"2.7\""} name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3614,7 +3431,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3674,7 +3490,6 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3689,7 +3504,6 @@ traitlets = "*" name = "mccabe" version = "0.6.1" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = "*" files = [ @@ -3701,7 +3515,6 @@ files = [ name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3721,7 +3534,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3733,7 +3545,6 @@ files = [ name = "msal" version = "1.20.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." -category = "dev" optional = false python-versions = "*" files = [ @@ -3754,7 +3565,6 @@ broker = ["pymsalruntime (>=0.11.2,<0.14)"] name = "msal-extensions" version = "1.0.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." -category = "dev" optional = false python-versions = "*" files = [ @@ -3773,7 +3583,6 @@ portalocker = [ name = "msrest" version = "0.7.1" description = "AutoRest swagger generator Python client runtime." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3795,7 +3604,6 @@ async = ["aiodns", "aiohttp (>=3.0)"] name = "msrestazure" version = "0.6.4" description = "AutoRest swagger generator Python client runtime. Azure-specific module." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -3812,7 +3620,6 @@ six = "*" name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3896,7 +3703,6 @@ files = [ name = "mypy" version = "0.942" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3939,7 +3745,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3951,7 +3756,6 @@ files = [ name = "myst-parser" version = "0.18.1" description = "An extended commonmark compliant parser, with bridges to docutils & sphinx." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3978,7 +3782,6 @@ testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", name = "nodeenv" version = "1.7.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -3993,7 +3796,6 @@ setuptools = "*" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4010,7 +3812,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4022,7 +3823,6 @@ files = [ name = "parameterized" version = "0.8.1" description = "Parameterized testing with any Python test framework" -category = "dev" optional = false python-versions = "*" files = [ @@ -4037,7 +3837,6 @@ dev = ["jinja2"] name = "paramiko" version = "3.1.0" description = "SSH2 protocol library" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4059,7 +3858,6 @@ invoke = ["invoke (>=2.0)"] name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4075,7 +3873,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathlib2" version = "2.3.7.post1" description = "Object-oriented filesystem paths" -category = "dev" optional = false python-versions = "*" files = [ @@ -4090,7 +3887,6 @@ six = "*" name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4102,7 +3898,6 @@ files = [ name = "pep8-naming" version = "0.12.1" description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" optional = false python-versions = "*" files = [ @@ -4118,7 +3913,6 @@ flake8-polyfill = ">=1.0.2,<2" name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -4133,7 +3927,6 @@ ptyprocess = ">=0.5" name = "pfzy" version = "0.3.4" description = "Python port of the fzy fuzzy string matching algorithm" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -4148,7 +3941,6 @@ docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -4160,7 +3952,6 @@ files = [ name = "pkginfo" version = "1.9.6" description = "Query metadata from sdists / bdists / installed packages." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4175,7 +3966,6 @@ testing = ["pytest", "pytest-cov"] name = "platformdirs" version = "3.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4191,7 +3981,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4207,7 +3996,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -4227,7 +4015,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4246,7 +4033,6 @@ virtualenv = ">=20.10.0" name = "prompt-toolkit" version = "3.0.38" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -4261,7 +4047,6 @@ wcwidth = "*" name = "proto-plus" version = "1.22.2" description = "Beautiful, Pythonic protocol buffers." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4279,7 +4064,6 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] name = "protobuf" version = "4.23.0" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4302,7 +4086,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4329,7 +4112,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -4341,7 +4123,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -4356,7 +4137,6 @@ tests = ["pytest"] name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4368,7 +4148,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4383,7 +4162,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.8.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -4395,7 +4173,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4407,7 +4184,6 @@ files = [ name = "pydantic" version = "1.10.7" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4462,7 +4238,6 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4480,7 +4255,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyflakes" version = "2.4.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4492,7 +4266,6 @@ files = [ name = "pygithub" version = "1.58.2" description = "Use the full Github API v3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4510,7 +4283,6 @@ requests = ">=2.14.0" name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4525,7 +4297,6 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.6.0" description = "JSON Web Token implementation in Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4546,7 +4317,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pymsalruntime" version = "0.13.2" description = "The MSALRuntime Python Interop Package" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4569,7 +4339,6 @@ files = [ name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4596,7 +4365,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyopenssl" version = "23.1.1" description = "Python wrapper module around the OpenSSL library" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4615,7 +4383,6 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." -category = "dev" optional = false python-versions = "*" files = [ @@ -4627,7 +4394,6 @@ files = [ name = "pysocks" version = "1.7.1" description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4640,7 +4406,6 @@ files = [ name = "pytest" version = "7.3.1" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4663,7 +4428,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.21.0" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4682,7 +4446,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4701,7 +4464,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-datadir" version = "1.4.1" description = "pytest plugin for test data directories and files" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4716,7 +4478,6 @@ pytest = ">=5.0" name = "pytest-mock" version = "3.10.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4734,7 +4495,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -4749,7 +4509,6 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4764,7 +4523,6 @@ cli = ["click (>=5.0)"] name = "pyupgrade" version = "2.38.4" description = "A tool to automatically upgrade syntax for newer versions." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4779,7 +4537,6 @@ tokenize-rt = "<5" name = "pywin32" version = "303" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -4801,7 +4558,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4851,7 +4607,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4874,7 +4629,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4893,7 +4647,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "responses" version = "0.21.0" description = "A utility library for mocking out the `requests` Python library." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4912,7 +4665,6 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rich" version = "13.3.5" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -4931,7 +4683,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -4946,7 +4697,6 @@ pyasn1 = ">=0.1.3" name = "s3transfer" version = "0.6.1" description = "An Amazon S3 Transfer Manager" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -4964,7 +4714,6 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] name = "scp" version = "0.13.6" description = "scp module for paramiko" -category = "dev" optional = false python-versions = "*" files = [ @@ -4979,7 +4728,6 @@ paramiko = "*" name = "semver" version = "2.13.0" description = "Python helper for Semantic Versioning (http://semver.org/)" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4991,7 +4739,6 @@ files = [ name = "setuptools" version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5008,7 +4755,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -5020,7 +4766,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5032,7 +4777,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -5044,7 +4788,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5056,7 +4799,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -5092,7 +4834,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx-autobuild" version = "2021.3.14" description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -5112,7 +4853,6 @@ test = ["pytest", "pytest-cov"] name = "sphinx-basic-ng" version = "1.0.0b1" description = "A modern skeleton for Sphinx themes." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5130,7 +4870,6 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta name = "sphinx-copybutton" version = "0.5.2" description = "Add a copy button to each of your code cells." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5149,7 +4888,6 @@ rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] name = "sphinx-design" version = "0.3.0" description = "A sphinx extension for designing beautiful, view size responsive web components." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5173,7 +4911,6 @@ theme-sbt = ["sphinx-book-theme (>=0.3.0,<0.4.0)"] name = "sphinx-prompt" version = "1.5.0" description = "Sphinx directive to add unselectable prompt" -category = "dev" optional = false python-versions = "*" files = [ @@ -5188,7 +4925,6 @@ Sphinx = "*" name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -5204,7 +4940,6 @@ test = ["pytest"] name = "sphinxcontrib-asciinema" version = "0.3.7" description = "Embed asciinema casts in your Sphinx docs" -category = "dev" optional = false python-versions = "*" files = [ @@ -5218,7 +4953,6 @@ sphinx = "*" name = "sphinxcontrib-autoprogram" version = "0.1.8" description = "Documenting CLI programs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5234,7 +4968,6 @@ Sphinx = ">=1.2" name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -5250,7 +4983,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -5266,7 +4998,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -5281,7 +5012,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -5297,7 +5027,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -5313,7 +5042,6 @@ test = ["pytest"] name = "sshtunnel" version = "0.1.5" description = "Pure python SSH tunnels" -category = "dev" optional = false python-versions = "*" files = [ @@ -5333,7 +5061,6 @@ test = ["tox (>=1.8.1)"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -5353,7 +5080,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5368,7 +5094,6 @@ widechars = ["wcwidth"] name = "tokenize-rt" version = "4.2.1" description = "A wrapper around the stdlib `tokenize` which roundtrips." -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -5380,7 +5105,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5392,7 +5116,6 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -5413,7 +5136,6 @@ files = [ name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5429,7 +5151,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-aiobotocore" version = "2.5.0.post2" description = "Type annotations for aiobotocore 2.5.0 generated with mypy-boto3-builder 7.14.5" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5799,7 +5520,6 @@ xray = ["types-aiobotocore-xray (>=2.5.0,<2.6.0)"] name = "types-aiobotocore-apigateway" version = "2.5.0.post1" description = "Type annotations for aiobotocore.APIGateway 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5814,7 +5534,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-apigatewayv2" version = "2.5.0.post1" description = "Type annotations for aiobotocore.ApiGatewayV2 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5829,7 +5548,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-ec2" version = "2.5.0.post1" description = "Type annotations for aiobotocore.EC2 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5844,7 +5562,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-ecs" version = "2.5.0.post1" description = "Type annotations for aiobotocore.ECS 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5859,7 +5576,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-elb" version = "2.5.0.post1" description = "Type annotations for aiobotocore.ElasticLoadBalancing 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5874,7 +5590,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-elbv2" version = "2.5.0.post1" description = "Type annotations for 
aiobotocore.ElasticLoadBalancingv2 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5889,7 +5604,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-rds" version = "2.5.0.post1" description = "Type annotations for aiobotocore.RDS 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5904,7 +5618,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-route53" version = "2.5.0.post1" description = "Type annotations for aiobotocore.Route53 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5919,7 +5632,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-s3" version = "2.5.0.post1" description = "Type annotations for aiobotocore.S3 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5934,7 +5646,6 @@ typing-extensions = ">=4.1.0" name = "types-aiobotocore-sts" version = "2.5.0.post1" description = "Type annotations for aiobotocore.STS 2.5.0 service generated with mypy-boto3-builder 7.13.0" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5949,7 +5660,6 @@ typing-extensions = ">=4.1.0" name = "types-awscrt" version = "0.16.17" description = "Type annotations and code completion for awscrt" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -5961,7 +5671,6 @@ files = [ name = "types-pyyaml" version = "6.0.12.9" description = "Typing stubs for PyYAML" -category = "dev" optional = false python-versions = "*" files = [ @@ -5973,7 +5682,6 @@ files = [ name = "types-requests" version = "2.30.0.0" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -5988,7 +5696,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.12" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -6000,7 +5707,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6012,7 +5718,6 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -6036,7 +5741,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "urllib3-secure-extra" version = "0.1.0" description = "Marker library to detect whether urllib3 was installed with the deprecated [secure] extra" -category = "dev" optional = false python-versions = "*" files = [ @@ -6048,7 +5752,6 @@ files = [ name = "virtualenv" version = "20.23.0" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6069,7 +5772,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -6081,7 +5783,6 @@ files = [ name = "websocket-client" version = "1.3.3" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6098,7 +5799,6 @@ test = ["websockets"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -6183,7 +5883,6 @@ files = [ name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -6195,7 +5894,6 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6283,7 +5981,6 @@ multidict = ">=4.0" name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6298,4 +5995,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "75674c49d099b459b002225e4bae79a1d02e09dc588851359bc205087cde3f6d" +content-hash = "b3f8ce2f1be49abb37fb4f9218c7972785c7c3529df98c043fc5b0e4fb055175" diff --git a/pyproject.toml b/pyproject.toml index 4114a34..f445acd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ censys-cc = "censys.cloud_connectors.common.cli:main" python = ">=3.9,<4.0" aiometer = "^0.4.0" backoff = "^2.2.1" -censys = "^2.2.2" +censys = "^2.2.3" inquirerpy = "^0.3.3" pydantic = {extras = ["dotenv", "email"], version = "^1.9.0"} PyYAML = "^6.0" @@ -83,6 +83,7 @@ pytest-cov = "^3.0.0" pytest-datadir = "^1.3.1" pytest-mock = "^3.7.0" responses = "^0.21.0" +jsondiff = "^2.0.0" [tool.poetry.group.docs] optional = true @@ -151,6 +152,7 @@ module = [ # Common "InquirerPy.*", "importlib_metadata", + "jsondiff", # Tests "parameterized", "asynctest", diff --git a/scripts/compare_workspaces.py b/scripts/compare_workspaces.py new file mode 100644 index 0000000..822c6ce --- /dev/null +++ b/scripts/compare_workspaces.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 + +# This is a script to compare two workspaces' seeds and cloud assets. +# It will print out the differences between the two workspaces. 
+
+import argparse
+import json
+from typing import Optional
+
+from jsondiff import diff
+
+from censys.asm import Seeds
+from censys.asm.assets import ObjectStorageAssets
+
+
+def parse_args() -> argparse.Namespace:
+    """Parse the command line arguments.
+
+    Returns:
+        argparse.Namespace: The parsed arguments.
+    """
+    parser = argparse.ArgumentParser(description="Compare two workspaces.")
+    parser.add_argument(
+        "--workspace1",
+        type=str,
+        required=True,
+        help="The first workspace API key to use.",
+        dest="workspace1_api_key",
+    )
+    parser.add_argument(
+        "--workspace2",
+        type=str,
+        required=True,
+        help="The second workspace API key to use.",
+        dest="workspace2_api_key",
+    )
+    return parser.parse_args()
+
+
+def clean_seeds(seeds: list) -> list:
+    """Remove the ID and createdOn field from the seeds, as it is not relevant to the comparison.
+
+    Args:
+        seeds (list): The list of seeds to clean.
+
+    Returns:
+        list: The cleaned list of seeds.
+    """
+    for seed in seeds:
+        if "id" in seed:
+            del seed["id"]
+        if "createdOn" in seed:
+            del seed["createdOn"]
+    return seeds
+
+
+def compare_seeds(workspace_1_api_key: str, workspace_2_api_key: str) -> Optional[dict]:
+    """Compare the seeds of two workspaces.
+
+    Args:
+        workspace_1_api_key (str): The first workspace API key to use.
+        workspace_2_api_key (str): The second workspace API key to use.
+
+    Returns:
+        Optional[dict]: The difference between the two workspaces.
+    """
+    # Create the clients
+    seed_client_1 = Seeds(workspace_1_api_key)
+    seed_client_2 = Seeds(workspace_2_api_key)
+
+    # Get the seeds from both workspaces
+    seeds_1 = seed_client_1.get_seeds()
+    seeds_2 = seed_client_2.get_seeds()
+
+    # Remove irrelevant fields
+    seeds_1 = clean_seeds(seeds_1)
+    seeds_2 = clean_seeds(seeds_2)
+
+    # Sort the seeds by value
+    seeds_1 = sorted(seeds_1, key=lambda k: k["value"])
+    seeds_2 = sorted(seeds_2, key=lambda k: k["value"])
+
+    # Compare the seeds
+    difference = diff(seeds_1, seeds_2, syntax="symmetric")
+
+    # If there is no difference, print a message
+    if not difference:
+        print("Both workspaces have the same seeds.")
+        return None
+
+    # Print the difference
+    print(json.dumps(difference, indent=4))
+
+    return difference
+
+
+def compare_object_storage_assets(
+    workspace_1_api_key: str, workspace_2_api_key: str
+) -> Optional[dict]:
+    """Compare the object storage assets of two workspaces.
+
+    Args:
+        workspace_1_api_key (str): The first workspace API key to use.
+        workspace_2_api_key (str): The second workspace API key to use.
+
+    Returns:
+        Optional[dict]: The difference between the two workspaces.
+ """ + # Create the clients + object_storage_assets_client_1 = ObjectStorageAssets(workspace_1_api_key) + object_storage_assets_client_2 = ObjectStorageAssets(workspace_2_api_key) + + # Get the object storage assets from both workspaces + object_storage_assets_1 = list(object_storage_assets_client_1.get_assets()) + object_storage_assets_2 = list(object_storage_assets_client_2.get_assets()) + + # Compare the assets + difference = diff(object_storage_assets_1, object_storage_assets_2) + + # If there is no difference, print a message + if not difference: + print("Both workspaces have the same object storage assets.") + return None + + # Print the difference + print(json.dumps(difference, indent=4)) + + return difference + + +def main() -> None: + """The main function.""" + # Parse the arguments + args = parse_args() + + # Compare the seeds + compare_seeds(args.workspace1_api_key, args.workspace2_api_key) + + # Compare the object storage assets + compare_object_storage_assets(args.workspace1_api_key, args.workspace2_api_key) + + +if __name__ == "__main__": + main() From b6022455ffaf2630114a7095ef7ab0a690424630 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 28 Jun 2023 13:06:44 -0400 Subject: [PATCH 13/19] tests: update tests for aws credentials --- .../aws_connector/connector.py | 25 +- .../aws_connector/credentials.py | 21 +- .../aws_connector/settings.py | 2 +- tests/data/aws/accounts_inherit_role.yml | 14 ++ ...ml => accounts_inherit_role_and_creds.yml} | 2 + tests/data/aws/accounts_key.yml | 1 + tests/data/aws/accounts_override_invalid.yml | 18 ++ tests/data/aws/accounts_override_role.yml | 10 + tests/data/aws/ecs.yml | 4 +- tests/test_aws_connector.py | 238 +++++++++++++++++- tests/test_aws_credentials.py | 1 + tests/test_aws_settings.py | 16 -- 12 files changed, 315 insertions(+), 37 deletions(-) create mode 100644 tests/data/aws/accounts_inherit_role.yml rename tests/data/aws/{accounts_inherit.yml => accounts_inherit_role_and_creds.yml} (72%) create mode 100644 tests/data/aws/accounts_override_invalid.yml create mode 100644 tests/data/aws/accounts_override_role.yml diff --git a/src/censys/cloud_connectors/aws_connector/connector.py b/src/censys/cloud_connectors/aws_connector/connector.py index 5ae8025..765304c 100644 --- a/src/censys/cloud_connectors/aws_connector/connector.py +++ b/src/censys/cloud_connectors/aws_connector/connector.py @@ -75,6 +75,7 @@ def __init__(self, settings: Settings): async def scan( # type: ignore self, + account_number: str, provider_setting: AwsSpecificSettings, credentials: AwsCredentials, region: str, @@ -83,14 +84,13 @@ async def scan( # type: ignore """Scan AWS. Args: + account_number (str): AWS account number. provider_setting (AwsSpecificSettings): AWS provider settings. credentials (AwsCredentials): AWS credentials. region (str): AWS region. ignored_tags (list[str], optional): List of tags to ignore. Defaults to IGNORED_TAGS. 
""" - self.logger.info( - f"Scanning AWS account {self.account_number} in region {region}" - ) + self.logger.info(f"Scanning AWS account {account_number} in region {region}") await super().scan( provider_setting, credentials=credentials, @@ -118,21 +118,23 @@ async def scan_all(self): # Scan each account in the provider for account in accounts: + # Clear the ignored tags list + self.ignored_tags = [] # Use the account number from the account if it is configured if account is not None: self.account_number = account.account_number - self.ignored_tags = ( - self.get_ignored_tags(account.ignore_tags) - if account.ignore_tags - else self.get_ignored_tags(provider_setting.ignore_tags) - ) + if account.ignore_tags: + self.ignored_tags.extend(account.ignore_tags) # Use the account number from the provider if it is not configured else: self.account_number = provider_setting.account_number - self.ignored_tags = self.get_ignored_tags( - provider_setting.ignore_tags - ) + + # Add the provider level ignored tags + self.ignored_tags = self.get_ignored_tags( + [*provider_setting.ignore_tags, *self.ignored_tags] + ) + self.ignored_tags.sort() # TODO: Add support for global services @@ -154,6 +156,7 @@ async def scan_all(self): # Scan the account await self.scan( + self.account_number, provider_setting, credentials, region, diff --git a/src/censys/cloud_connectors/aws_connector/credentials.py b/src/censys/cloud_connectors/aws_connector/credentials.py index 1c992a1..77c835c 100644 --- a/src/censys/cloud_connectors/aws_connector/credentials.py +++ b/src/censys/cloud_connectors/aws_connector/credentials.py @@ -54,6 +54,7 @@ async def get_aws_credentials( # If the account has a role and credentials, assume it using the account credentials if account_has_credentials and account_has_role: + # TODO: This should never happen assert account.role_name assert account.role_session_name return await assume_role( @@ -126,6 +127,16 @@ async def get_aws_credentials( region=region, ) + # If the account has credentials, but no role, use them + if account_has_credentials: + assert account.access_key + assert account.secret_key + return { + "aws_access_key_id": account.access_key, + "aws_secret_access_key": account.secret_key, + "region_name": region, + } + # If neither the provider nor the account have credentials or roles, use local credentials return {} # pragma: no cover @@ -163,10 +174,12 @@ async def get_aws_credentials( if provider_has_credentials: assert provider_settings.access_key assert provider_settings.secret_key - credentials = { - "aws_access_key_id": provider_settings.access_key, - "aws_secret_access_key": provider_settings.secret_key, - } + credentials.update( + { + "aws_access_key_id": provider_settings.access_key, + "aws_secret_access_key": provider_settings.secret_key, + } + ) # If the provider has neither credentials nor a role, use local credentials return credentials diff --git a/src/censys/cloud_connectors/aws_connector/settings.py b/src/censys/cloud_connectors/aws_connector/settings.py index ee87921..cc52e05 100644 --- a/src/censys/cloud_connectors/aws_connector/settings.py +++ b/src/censys/cloud_connectors/aws_connector/settings.py @@ -25,7 +25,7 @@ class AwsAccount(BaseModel): secret_key: Optional[str] = Field(min_length=1) role_name: Optional[str] = Field(min_length=1) role_session_name: Optional[str] = Field(min_length=1) - ignore_tags: list[str] = Field(description="Tags to ignore", default=DEFAULT_IGNORE) + ignore_tags: list[str] = Field(description="Tags to ignore", default_factory=list) class 
AwsSpecificSettings(ProviderSpecificSettings):
diff --git a/tests/data/aws/accounts_inherit_role.yml b/tests/data/aws/accounts_inherit_role.yml
new file mode 100644
index 0000000..e420f4a
--- /dev/null
+++ b/tests/data/aws/accounts_inherit_role.yml
@@ -0,0 +1,14 @@
+# Use local credentials to assume role into both the primary account (111111111111)
+# and the secondary accounts (111111111112 and 111111111113).
+- provider: AWS
+  account_number: 111111111111
+  # These are the role name and session name used to assume role into each account, including the primary account
+  role_name: test-primary-role-name
+  role_session_name: test-primary-role-session-name
+  ignore_tags:
+    - test-primary-ignore-tag-1
+  regions:
+    - test-region
+  accounts:
+    - account_number: 111111111112
+    - account_number: 111111111113
diff --git a/tests/data/aws/accounts_inherit.yml b/tests/data/aws/accounts_inherit_role_and_creds.yml
similarity index 72%
rename from tests/data/aws/accounts_inherit.yml
rename to tests/data/aws/accounts_inherit_role_and_creds.yml
index 754b1d1..201d868 100644
--- a/tests/data/aws/accounts_inherit.yml
+++ b/tests/data/aws/accounts_inherit_role_and_creds.yml
@@ -1,7 +1,9 @@
 - provider: AWS
   account_number: 111111111111
+  # These keys are used to assume role into each account
   access_key: test-primary-access-key
   secret_key: test-primary-secret-key
+  # These are the role name and session name used to assume role into each account
   role_name: test-primary-role-name
   role_session_name: test-primary-role-session-name
   ignore_tags:
diff --git a/tests/data/aws/accounts_key.yml b/tests/data/aws/accounts_key.yml
index bc40847..dff7fd6 100644
--- a/tests/data/aws/accounts_key.yml
+++ b/tests/data/aws/accounts_key.yml
@@ -1,3 +1,4 @@
+# The root access key is not overridden by the child account.
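+# Each account supplies its own access key and secret key, so no role is assumed for any account.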
- provider: AWS account_number: 111111111111 access_key: example-access-key-1 diff --git a/tests/data/aws/accounts_override_invalid.yml b/tests/data/aws/accounts_override_invalid.yml new file mode 100644 index 0000000..5d682a4 --- /dev/null +++ b/tests/data/aws/accounts_override_invalid.yml @@ -0,0 +1,18 @@ +- provider: AWS + account_number: 111111111111 + access_key: test-primary-access-key + secret_key: test-primary-secret-key + role_name: test-primary-role-name + role_session_name: test-primary-role-session-name + ignore_tags: + - test-primary-ignore-tag-1 + regions: + - test-region + accounts: + - account_number: 111111111112 + access_key: test-secondary-access-key + secret_key: test-secondary-secret-key + role_name: test-override-role + role_session_name: test-override-session-name + ignore_tags: + - test-override-ignore-tag-1 diff --git a/tests/data/aws/accounts_override_role.yml b/tests/data/aws/accounts_override_role.yml new file mode 100644 index 0000000..9a6cbbd --- /dev/null +++ b/tests/data/aws/accounts_override_role.yml @@ -0,0 +1,10 @@ +- provider: AWS + account_number: 111111111111 + role_name: test-primary-role-name + role_session_name: test-primary-role-session-name + regions: + - test-region + accounts: + - account_number: 111111111112 + role_name: test-override-role + role_session_name: test-override-session-name diff --git a/tests/data/aws/ecs.yml b/tests/data/aws/ecs.yml index fb84b9b..db530ed 100644 --- a/tests/data/aws/ecs.yml +++ b/tests/data/aws/ecs.yml @@ -1,10 +1,10 @@ - provider: AWS account_number: 111111111111 - role_name: example-role-1 + role_name: example-role-1 # Role to assume in parent account role_session_name: censys-cloud-connector regions: - test-region accounts: - account_number: 111111111112 - role_name: example-role-2 + role_name: example-role-2 # IAM role in child account role_session_name: censys-cloud-connector diff --git a/tests/test_aws_connector.py b/tests/test_aws_connector.py index 9c026e9..eb29392 100644 --- a/tests/test_aws_connector.py +++ b/tests/test_aws_connector.py @@ -1,12 +1,16 @@ import json -from typing import Any +from typing import Any, Optional, TypedDict from unittest.mock import MagicMock, call import asynctest from asynctest import TestCase from botocore.exceptions import ClientError +from parameterized import parameterized -from censys.cloud_connectors.aws_connector.connector import AwsCloudConnector +from censys.cloud_connectors.aws_connector.connector import ( + IGNORED_TAGS, + AwsCloudConnector, +) from censys.cloud_connectors.aws_connector.credentials import ( AwsCredentials, get_aws_credentials, @@ -19,6 +23,20 @@ from tests.base_connector_case import BaseConnectorCase +class ExpectedScan(TypedDict, total=False): + """Expected scan details.""" + + account_number: str + access_key: Optional[str] + secret_key: Optional[str] + sts_client_access_key: Optional[str] + sts_client_secret_key: Optional[str] + role_name: Optional[str] + role_session_name: Optional[str] + ignored_tags: Optional[list[str]] + regions: list[str] + + class TestAwsConnector(BaseConnectorCase, TestCase): connector: AwsCloudConnector connector_cls = AwsCloudConnector @@ -125,7 +143,6 @@ async def test_scan_all(self): # Mock scan mock_healthcheck = self.mock_healthcheck() - # mock_scan = self.mocker.patch.object(self.connector, "scan") with asynctest.patch.object(self.connector, "scan") as mock_scan: # Actual call await self.connector.scan_all() @@ -135,6 +152,221 @@ async def test_scan_all(self): assert mock_scan.call_count == expected_calls 
self.assert_healthcheck_called(mock_healthcheck, expected_calls) + def get_settings_file(self, file_name) -> list[AwsSpecificSettings]: + """Read a test providers.yml file. + + Args: + file_name (str): Filename. + + Returns: + list[AwsSpecificSettings]: List of AWS provider settings. + """ + # Clear existing settings + self.settings.providers.clear() + + # Read test settings + self.settings.providers_config_file = self.shared_datadir / "aws" / file_name + self.settings.read_providers_config_file([ProviderEnum.AWS]) + + # Get settings + provider_settings = self.settings.providers[ProviderEnum.AWS] + settings: list[AwsSpecificSettings] = list(provider_settings.values()) # type: ignore + return settings + + def build_credentials( + self, scan: ExpectedScan, region: Optional[str] = None + ) -> AwsCredentials: + """Build credentials from scan data. + + Args: + scan (ExpectedScan): Scan data. + region (Optional[str], optional): Region. Defaults to None. + + Returns: + AwsCredentials: AWS credentials. + """ + credentials: AwsCredentials = {} + if access_key := scan.get("access_key"): + credentials["aws_access_key_id"] = access_key + if secret_key := scan.get("secret_key"): + credentials["aws_secret_access_key"] = secret_key + if scan.get("role_name"): + credentials["aws_access_key_id"] = "example-access-key-assumed-1" + credentials["aws_secret_access_key"] = "example-secret-key-assumed-1" + credentials["aws_session_token"] = "example-session-token-assumed-1" + if region: + credentials["region_name"] = region + return credentials + + @parameterized.expand( + [ + ( + "accounts_inherit_role_and_creds.yml", + [ + { + "account_number": "111111111111", + "sts_client_access_key": "example-access-key-1", + "sts_client_secret_key": "example-secret-key-1", + "role_name": "test-primary-role-name", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + { + "account_number": "111111111112", + "sts_client_access_key": "example-access-key-1", + "sts_client_secret_key": "example-secret-key-1", + "role_name": "example-role-2", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + { + "account_number": "111111111113", + "sts_client_access_key": "example-access-key-1", + "sts_client_secret_key": "example-secret-key-1", + "role_name": "example-role-3", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + ], + ), + ( + "accounts_inherit_role.yml", + [ + { + "account_number": "111111111111", + "role_name": "test-primary-role-name", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + { + "account_number": "111111111112", + "role_name": "test-primary-role-name", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + { + "account_number": "111111111113", + "role_name": "test-primary-role-name", + "ignored_tags": ["test-primary-ignore-tag-1"], + "regions": ["test-region"], + }, + ], + ), + ( + "accounts_key.yml", + [ + { + "account_number": "111111111111", + "access_key": "example-access-key-1", + "secret_key": "example-secret-key-1", + "regions": ["test-region"], + }, + { + "account_number": "111111111112", + "access_key": "example-access-key-2", + "secret_key": "example-secret-key-2", + "regions": ["test-region"], + }, + ], + ), + ( + "accounts_override_role.yml", + [ + { + "account_number": "111111111111", + "role_name": "test-primary-role-name", + "regions": ["test-region"], + }, + { + "account_number": "111111111112", + "role_name": "test-override-role", + 
"regions": ["test-region"], + }, + ], + ), + ( + "accounts_override.yml", + [ + { + "account_number": "111111111111", + "sts_client_access_key": "test-primary-access-key", + "sts_client_secret_key": "test-primary-secret-key", + "role_name": "test-primary-role-name", + "ignored_tags": [ + "test-primary-ignore-tag-1", + ], + "regions": ["test-region"], + }, + { + "account_number": "111111111112", + "sts_client_access_key": "test-primary-access-key", + "sts_client_secret_key": "test-primary-secret-key", + "role_name": "test-override-role", + "ignored_tags": [ + "test-override-ignore-tag-1", + "test-primary-ignore-tag-1", + ], + "regions": ["test-region"], + }, + ], + ), + ] + ) + async def test_scan_all_with_providers_yaml( + self, providers_file: str, scans: list[ExpectedScan] + ): + # Test data + self.get_settings_file(providers_file) + + def assume_role_static( + account_number: str, + role_name: str, + role_session_name: str, + access_key: Optional[str] = None, + secret_key: Optional[str] = None, + region: Optional[str] = None, + ) -> AwsCredentials: + return { + "aws_access_key_id": "example-access-key-assumed-1", + "aws_secret_access_key": "example-secret-key-assumed-1", + "aws_session_token": "example-session-token-assumed-1", + "region_name": region, + } + + # Mock + mock_assume_role = self.mocker.patch( + "censys.cloud_connectors.aws_connector.credentials.assume_role", + new_callable=asynctest.CoroutineMock, + ) + mock_assume_role.side_effect = assume_role_static + mock_healthcheck = self.mock_healthcheck() + with asynctest.patch.object( + self.connector, "scan", new_callable=asynctest.CoroutineMock + ) as mock_scan: + mock_scan: asynctest.CoroutineMock # type: ignore[no-redef] + # Actual call + await self.connector.scan_all() + + print(mock_scan.call_args_list) + + # Assertions + expected_calls = 0 + for scan in scans: + for region in scan["regions"]: + credentials: AwsCredentials = self.build_credentials(scan, region) + scan_ignored_tags = scan.get("ignored_tags") or [] + ignored_tags = [*IGNORED_TAGS, *scan_ignored_tags] + ignored_tags.sort() + mock_scan.assert_any_await( + scan["account_number"], + self.connector.provider_settings, + credentials, + region, + ignored_tags=ignored_tags, + ) + expected_calls += 1 + self.assert_healthcheck_called(mock_healthcheck, expected_calls) + # TODO test multiple account_numbers with multiple regions # TODO test single account_number with multiple regions diff --git a/tests/test_aws_credentials.py b/tests/test_aws_credentials.py index 43ff176..4302a8e 100644 --- a/tests/test_aws_credentials.py +++ b/tests/test_aws_credentials.py @@ -20,6 +20,7 @@ class TestAwsCredentials(BaseCase, TestCase): @parameterized.expand( [ ( + # Account has access key and secret key and specifies a role to assume "account-creds-assume-account-role", {"region_name": "us-east-1"}, { diff --git a/tests/test_aws_settings.py b/tests/test_aws_settings.py index e2ef6c4..88e2361 100644 --- a/tests/test_aws_settings.py +++ b/tests/test_aws_settings.py @@ -6,7 +6,6 @@ AwsAccount, AwsSpecificSettings, ) -from censys.cloud_connectors.common.enums import ProviderEnum from censys.cloud_connectors.common.settings import Settings from tests.base_case import BaseCase @@ -47,21 +46,6 @@ def aws_settings(self, overrides: dict) -> AwsSpecificSettings: **settings, ) - def get_settings_file(self, file_name) -> list[AwsSpecificSettings]: - """Read a test providers.yml file. - - Args: - file_name (str): Filename. - - Returns: - list[AwsSpecificSettings]: List of AWS provider settings. 
- """ - self.settings.providers_config_file = self.shared_datadir / "aws" / file_name - self.settings.read_providers_config_file([ProviderEnum.AWS]) - provider_settings = self.settings.providers[ProviderEnum.AWS] - settings: list[AwsSpecificSettings] = list(provider_settings.values()) # type: ignore - return settings - def test_missing_role_and_access_key(self): with pytest.raises(ValueError, match="Specify either access_key"): AwsSpecificSettings( # type: ignore[call-arg] From cafaa4e26656f55ad559941a67ebf2f1c939c654 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 28 Jun 2023 13:12:18 -0400 Subject: [PATCH 14/19] fix: remove invalid use case --- .../aws_connector/credentials.py | 14 --------- .../aws_connector/settings.py | 22 ++++++++++++++ tests/test_aws_credentials.py | 30 ------------------- 3 files changed, 22 insertions(+), 44 deletions(-) diff --git a/src/censys/cloud_connectors/aws_connector/credentials.py b/src/censys/cloud_connectors/aws_connector/credentials.py index 77c835c..686551c 100644 --- a/src/censys/cloud_connectors/aws_connector/credentials.py +++ b/src/censys/cloud_connectors/aws_connector/credentials.py @@ -52,20 +52,6 @@ async def get_aws_credentials( account_has_credentials = bool(account.access_key and account.secret_key) account_has_role = bool(account.role_name and account.role_session_name) - # If the account has a role and credentials, assume it using the account credentials - if account_has_credentials and account_has_role: - # TODO: This should never happen - assert account.role_name - assert account.role_session_name - return await assume_role( - account.account_number, - account.role_name, - account.role_session_name, - access_key=account.access_key, - secret_key=account.secret_key, - region=region, - ) - # If the provider has a role and the account has credentials, assume it using the account credentials if provider_has_role and account_has_credentials: assert provider_settings.role_name diff --git a/src/censys/cloud_connectors/aws_connector/settings.py b/src/censys/cloud_connectors/aws_connector/settings.py index cc52e05..821a061 100644 --- a/src/censys/cloud_connectors/aws_connector/settings.py +++ b/src/censys/cloud_connectors/aws_connector/settings.py @@ -27,6 +27,28 @@ class AwsAccount(BaseModel): role_session_name: Optional[str] = Field(min_length=1) ignore_tags: list[str] = Field(description="Tags to ignore", default_factory=list) + @root_validator + def validate_account_numbers(cls, values: dict[str, Any]) -> dict: + """Validate. + + Args: + values (dict): Settings + + Raises: + ValueError: Invalid settings. 
+ + Returns: + dict: Settings + """ + has_key = values.get("access_key") and values.get("secret_key") + has_role = values.get("role_name") + has_both = has_key and has_role + + if has_both: + raise ValueError(AwsMessages.KEY_OR_ROLE_REQUIRED.value) + + return values + class AwsSpecificSettings(ProviderSpecificSettings): """AWS specific settings.""" diff --git a/tests/test_aws_credentials.py b/tests/test_aws_credentials.py index 4302a8e..b2fe859 100644 --- a/tests/test_aws_credentials.py +++ b/tests/test_aws_credentials.py @@ -19,36 +19,6 @@ class TestAwsCredentials(BaseCase, TestCase): @parameterized.expand( [ - ( - # Account has access key and secret key and specifies a role to assume - "account-creds-assume-account-role", - {"region_name": "us-east-1"}, - { - "account_number": "123123123123", - "access_key": None, - "secret_key": None, - "role_name": "test-role-name-settings", - "role_session_name": "test-role-session-name-settings", - }, - ( - "321321321321", - "test-role-name-account", - "test-role-session-name-account", - ), - { - "access_key": "xxxxxxxxxxxxxxxxxxxx", - "secret_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", - "region": "us-east-1", - }, - AwsAccount( - account_number=AwsAccountNumber("321321321321"), - access_key="xxxxxxxxxxxxxxxxxxxx", - secret_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", - role_name="test-role-name-account", - role_session_name="test-role-session-name-account", - ), - "us-east-1", - ), ( "account-creds-assume-settings-role", {"region_name": "us-east-1"}, From 3ea6eb0f6c7744911a986a8be3cfc369b5cf8924 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 28 Jun 2023 13:12:38 -0400 Subject: [PATCH 15/19] chore: clean up comments --- src/censys/cloud_connectors/azure_connector/connector.py | 1 - src/censys/cloud_connectors/common/cli/args.py | 1 - src/censys/cloud_connectors/common/cli/provider_setup.py | 1 - 3 files changed, 3 deletions(-) diff --git a/src/censys/cloud_connectors/azure_connector/connector.py b/src/censys/cloud_connectors/azure_connector/connector.py index a9f3703..140f6fb 100644 --- a/src/censys/cloud_connectors/azure_connector/connector.py +++ b/src/censys/cloud_connectors/azure_connector/connector.py @@ -262,7 +262,6 @@ async def get_dns_records( async for zone in self._list_dns_zones(dns_client): zone_dict = zone.as_dict() - # TODO: Do we need to check if zone is public? (ie. do we care?) if zone_dict.get("zone_type") != "Public": # pragma: no cover continue zone_resource_group = zone_dict.get("id").split("/")[4] diff --git a/src/censys/cloud_connectors/common/cli/args.py b/src/censys/cloud_connectors/common/cli/args.py index 19d8e9f..3a23afc 100644 --- a/src/censys/cloud_connectors/common/cli/args.py +++ b/src/censys/cloud_connectors/common/cli/args.py @@ -30,7 +30,6 @@ def print_help(_: argparse.Namespace): for command in commands.__dict__.values(): try: - # FIXME: This is weird and doesn't just include the commands include_func = command.include_cli except AttributeError: continue diff --git a/src/censys/cloud_connectors/common/cli/provider_setup.py b/src/censys/cloud_connectors/common/cli/provider_setup.py index 1316020..4d1a773 100644 --- a/src/censys/cloud_connectors/common/cli/provider_setup.py +++ b/src/censys/cloud_connectors/common/cli/provider_setup.py @@ -264,7 +264,6 @@ def prompt_for_settings(self) -> ProviderSpecificSettings: question["type"] = "input" question["message"] = "Enter a " + question["message"] # type: ignore - # TODO: Is this something we want? 
if "secret" in field.name.lower(): question["type"] = "password" elif lenient_issubclass(field_type, Path): From 0bd9d7aea4c8469a7dff2d53b70e6b3359a4c651 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 5 Jul 2023 11:37:24 -0400 Subject: [PATCH 16/19] chore: add vscode launch --- .gitignore | 5 ++++- .vscode/launch.json | 48 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) create mode 100644 .vscode/launch.json diff --git a/.gitignore b/.gitignore index b990310..aa0d644 100644 --- a/.gitignore +++ b/.gitignore @@ -187,7 +187,10 @@ terraform.rc # IDE-maintained configuration .idea/ -.vscode/launch.json +.vscode/* +!.vscode/settings.json +!.vscode/extensions.json +!.vscode/launch.json # Builds *.zip diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..128424a --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,48 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Censys CC Scan Debug", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/.venv/bin/censys-cc", + "console": "integratedTerminal", + "envFile": "${workspaceFolder}/.env", + "justMyCode": false, + "args": ["scan"] + }, + { + "name": "Censys CC Scan Debug (AWS)", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/.venv/bin/censys-cc", + "console": "integratedTerminal", + "envFile": "${workspaceFolder}/.env", + "justMyCode": false, + "args": ["scan", "-p", "aws"] + }, + { + "name": "Censys CC Scan Debug (Azure)", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/.venv/bin/censys-cc", + "console": "integratedTerminal", + "envFile": "${workspaceFolder}/.env", + "justMyCode": false, + "args": ["scan", "-p", "azure"] + }, + { + "name": "Censys CC Scan Debug (GCP)", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/.venv/bin/censys-cc", + "console": "integratedTerminal", + "envFile": "${workspaceFolder}/.env", + "justMyCode": false, + "args": ["scan", "-p", "gcp"] + } + ] +} \ No newline at end of file From e33dcf4559c19c8dd2bb0c69c0a8fa40d4d5e77a Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 5 Jul 2023 11:38:26 -0400 Subject: [PATCH 17/19] chore: print differences --- scripts/compare_workspaces.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/compare_workspaces.py b/scripts/compare_workspaces.py index 822c6ce..2ea5787 100644 --- a/scripts/compare_workspaces.py +++ b/scripts/compare_workspaces.py @@ -4,7 +4,6 @@ # It will print out the differences between the two workspaces. 
import argparse -import json from typing import Optional from jsondiff import diff @@ -89,7 +88,8 @@ def compare_seeds(workspace_1_api_key: str, workspace_2_api_key: str) -> Optiona return None # Print the difference - print(json.dumps(difference, indent=4)) + print("The difference between the two workspaces' seeds is:") + print(difference) return difference @@ -123,7 +123,8 @@ def compare_object_storage_assets( return None # Print the difference - print(json.dumps(difference, indent=4)) + print("The difference between the two workspaces' object storage assets is:") + print(difference) return difference From e53977c95cb514755436776ef97da742f7d53f88 Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Wed, 5 Jul 2023 11:44:03 -0400 Subject: [PATCH 18/19] chore: add workspace comparison docker-compose --- docker-compose.compare.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 docker-compose.compare.yml diff --git a/docker-compose.compare.yml b/docker-compose.compare.yml new file mode 100644 index 0000000..0d9372d --- /dev/null +++ b/docker-compose.compare.yml @@ -0,0 +1,21 @@ +# docker-compose -f docker-compose.compare.yml up +version: "3.8" + +services: + # Workspace 1 + censys_cloud_connector_1: + image: gcr.io/censys-io/censys-cloud-connector:v3.1.3 + volumes: + - ./providers.compare.yml:/app/providers.yml + - ./secrets:/app/secrets + environment: + CENSYS_API_KEY: ${CENSYS_API_KEY_1} + + # Workspace 2 + censys_cloud_connector_2: + image: gcr.io/censys-io/censys-cloud-connector:latest + volumes: + - ./providers.compare.yml:/app/providers.yml + - ./secrets:/app/secrets + environment: + CENSYS_API_KEY: ${CENSYS_API_KEY_2} From 6a8bc6ff79d510d35b487fe7d5131d01bbb0f76c Mon Sep 17 00:00:00 2001 From: Aidan Holland Date: Thu, 6 Jul 2023 12:46:11 -0400 Subject: [PATCH 19/19] chore: remove extra script and docker-compose --- docker-compose.compare.yml | 21 ----- poetry.lock | 2 +- pyproject.toml | 2 - scripts/compare_workspaces.py | 145 ---------------------------------- 4 files changed, 1 insertion(+), 169 deletions(-) delete mode 100644 docker-compose.compare.yml delete mode 100644 scripts/compare_workspaces.py diff --git a/docker-compose.compare.yml b/docker-compose.compare.yml deleted file mode 100644 index 0d9372d..0000000 --- a/docker-compose.compare.yml +++ /dev/null @@ -1,21 +0,0 @@ -# docker-compose -f docker-compose.compare.yml up -version: "3.8" - -services: - # Workspace 1 - censys_cloud_connector_1: - image: gcr.io/censys-io/censys-cloud-connector:v3.1.3 - volumes: - - ./providers.compare.yml:/app/providers.yml - - ./secrets:/app/secrets - environment: - CENSYS_API_KEY: ${CENSYS_API_KEY_1} - - # Workspace 2 - censys_cloud_connector_2: - image: gcr.io/censys-io/censys-cloud-connector:latest - volumes: - - ./providers.compare.yml:/app/providers.yml - - ./secrets:/app/secrets - environment: - CENSYS_API_KEY: ${CENSYS_API_KEY_2} diff --git a/poetry.lock b/poetry.lock index 0dfb6d1..e8b718e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5995,4 +5995,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "b3f8ce2f1be49abb37fb4f9218c7972785c7c3529df98c043fc5b0e4fb055175" +content-hash = "a64bc771edc8f2598120f128fa21e3692f83d620a1478929d7d0313261a533f5" diff --git a/pyproject.toml b/pyproject.toml index f445acd..09ea4ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,7 +83,6 @@ pytest-cov = "^3.0.0" pytest-datadir = "^1.3.1" pytest-mock = 
"^3.7.0" responses = "^0.21.0" -jsondiff = "^2.0.0" [tool.poetry.group.docs] optional = true @@ -152,7 +151,6 @@ module = [ # Common "InquirerPy.*", "importlib_metadata", - "jsondiff", # Tests "parameterized", "asynctest", diff --git a/scripts/compare_workspaces.py b/scripts/compare_workspaces.py deleted file mode 100644 index 2ea5787..0000000 --- a/scripts/compare_workspaces.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env python3 - -# This is a script to compare two workspaces' seeds and cloud assets. -# It will print out the differences between the two workspaces. - -import argparse -from typing import Optional - -from jsondiff import diff - -from censys.asm import Seeds -from censys.asm.assets import ObjectStorageAssets - - -def parse_args() -> argparse.Namespace: - """Parse the command line arguments. - - Returns: - argparse.Namespace: The parsed arguments. - """ - parser = argparse.ArgumentParser(description="Compare two workspaces.") - parser.add_argument( - "--workspace1", - type=str, - required=True, - help="The first workspace API key to use.", - dest="workspace1_api_key", - ) - parser.add_argument( - "--workspace2", - type=str, - required=True, - help="The second workspace API key to use.", - dest="workspace2_api_key", - ) - return parser.parse_args() - - -def clean_seeds(seeds: list) -> list: - """Remove the ID and createdOn field from the seeds, as it is not relevant to the comparison. - - Args: - seeds (list): The list of seeds to clean. - - Returns: - list: The cleaned list of seeds. - """ - for seed in seeds: - if "id" in seed: - del seed["id"] - if "createdOn" in seed: - del seed["createdOn"] - return seeds - - -def compare_seeds(workspace_1_api_key: str, workspace_2_api_key: str) -> Optional[dict]: - """Compare the seeds of two workspaces. - - Args: - workspace_1_api_key (str): The first workspace API key to use. - workspace_2_api_key (str): The second workspace API key to use. - - Returns: - Optional[dict]: The difference between the two workspaces. - """ - # Create the clients - seed_client_1 = Seeds(workspace_1_api_key) - seed_client_2 = Seeds(workspace_2_api_key) - - # Get the seeds from both workspaces - seeds_1 = seed_client_1.get_seeds() - seeds_2 = seed_client_2.get_seeds() - - # Remove irrelevant fields - seeds_1 = clean_seeds(seeds_1) - seeds_2 = clean_seeds(seeds_2) - - # Sort the seeds by value - seeds_1 = sorted(seeds_1, key=lambda k: k["value"]) - seeds_2 = sorted(seeds_2, key=lambda k: k["value"]) - - # Compare the seeds - difference = diff(seeds_1, seeds_2, syntax="symmetric") - - # If there is no difference, print a message - if not difference: - print("Both workspaces have the same seeds.") - return None - - # Print the difference - print("The difference between the two workspaces' seeds is:") - print(difference) - - return difference - - -def compare_object_storage_assets( - workspace_1_api_key: str, workspace_2_api_key: str -) -> Optional[dict]: - """Compare the object storage assets of two workspaces. - - Args: - workspace_1_api_key (str): The first workspace API key to use. - workspace_2_api_key (str): The second workspace API key to use. - - Returns: - Optional[dict]: The difference between the two workspaces. 
- """ - # Create the clients - object_storage_assets_client_1 = ObjectStorageAssets(workspace_1_api_key) - object_storage_assets_client_2 = ObjectStorageAssets(workspace_2_api_key) - - # Get the object storage assets from both workspaces - object_storage_assets_1 = list(object_storage_assets_client_1.get_assets()) - object_storage_assets_2 = list(object_storage_assets_client_2.get_assets()) - - # Compare the assets - difference = diff(object_storage_assets_1, object_storage_assets_2) - - # If there is no difference, print a message - if not difference: - print("Both workspaces have the same object storage assets.") - return None - - # Print the difference - print("The difference between the two workspaces' object storage assets is:") - print(difference) - - return difference - - -def main() -> None: - """The main function.""" - # Parse the arguments - args = parse_args() - - # Compare the seeds - compare_seeds(args.workspace1_api_key, args.workspace2_api_key) - - # Compare the object storage assets - compare_object_storage_assets(args.workspace1_api_key, args.workspace2_api_key) - - -if __name__ == "__main__": - main()